Flashy little Java tricks for everyday work


Deduplicate a List where elements count as duplicates when every compared field matches (one field is deliberately left out of the comparison)
//dedupe via a TreeSet whose comparator builds a composite key from the compared fields, then copy the result back into an ArrayList
//use a separator between the fields so that, say, "1"+"23" and "12"+"3" cannot collapse into the same key
List<EcologyRelation> collect = value.stream().collect(Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(f -> f.getFileId() + "_" + f.getComponentId() + "_" + f.getUserId() + "_" + f.getModelId() + "_" + f.getProjectId() + "_" + f.getModelName()))), ArrayList::new));
//a HashSet only deduplicates further if EcologyRelation overrides equals()/hashCode()
HashSet<EcologyRelation> set = new HashSet<>();
set.addAll(collect);
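An equivalent way to dedupe on a field combination, sketched with the same (assumed) EcologyRelation getters: build a composite key with Collectors.toMap and keep the first element seen for each key.

List<EcologyRelation> deduped = new ArrayList<>(value.stream()
    .collect(Collectors.toMap(
        f -> f.getFileId() + "_" + f.getComponentId() + "_" + f.getUserId()
            + "_" + f.getModelId() + "_" + f.getProjectId() + "_" + f.getModelName(),
        f -> f,
        (first, second) -> first,   // on a duplicate key keep the first element
        LinkedHashMap::new))        // keep encounter order
    .values());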

for (Map.Entry entry : map.entrySet()) {
//document id
Integer fileId = (Integer) entry.getKey();
List<EcologyRelation> value = (List<EcologyRelation>) entry.getValue();

//fill in the model name
for (EcologyRelation e : value) {
if (e.getModelId() != null) {
e.setModelName(MODEL_ID_NAME_MAP.get((long) e.getModelId()));
}
}
JSONObject jsonObject = new JSONObject();
jsonObject.put("fid", fileId);
jsonObject.put("status", 1);
jsonObject.put("ecologyRelation", value);
m.put(fileId, jsonObject);
}

MyBatis

Return a List<Map> straight from the mapper, so you don't have to write a dedicated VO just to receive the columns

dao  :  List<Map<String, Object>> getTree(@Param("list")String list);

xml:    <select id="getTree" resultType="java.util.HashMap">

      select cast(id as char) as id, group_concat(distinct id, ':', name) as rid, concat(name, '/', IFNULL(age, ' ')) as info from tablename where id in (${list}) group by id
</select>
This query takes the ids in list, e.g. (1,2,3), groups by them and returns the id, the grouped rid, and name/age concatenated into info.
Result:
id  rid     info
1   1:name  name/age
2   2:name  name/age
3   3:name  name/age
cast(id as char) as id — use this when the id column's type trips up the Map mapping; normally it isn't needed.
group_concat(distinct id, ':', name) — aggregates within each group, distinct removes duplicates; on its own it would collapse everything into one row, but combined with group by it aggregates per group (saves writing two SQL statements).
concat(name, '/', IFNULL(age, '')) — joins several column values into one string.
ifnull(max(label_id), 0) — when selecting an aggregate such as max(), wrap it in ifnull to avoid null surprises; guard against the occasional edge case too.
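For reference, a minimal sketch of consuming that List<Map> on the Java side; the dao field name and the "1,2,3" id string are assumptions, and the keys match the column aliases above:

List<Map<String, Object>> rows = dao.getTree("1,2,3");
for (Map<String, Object> row : rows) {
    // cast(id as char), group_concat and concat all come back as strings
    System.out.println(row.get("id") + " " + row.get("rid") + " " + row.get("info"));
}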


//batch delete
int deleteBatch(@Param("list") List<Integer> list, @Param("tableName") String tableName);

<delete id="deleteBatch">
delete from ${tableName} where
id in
<foreach collection="list" item="e" separator="," open="(" close=")" index="index">
#{e}
</foreach>
</delete>
//fuzzy (LIKE) query
select * from ${tableName} where name like concat('%', #{name}, '%')

Create a new table from a condition and copy the matching rows into it
Create table ${newTableName} SELECT * FROM ${tableName} where model_id in (${list})
Copy rows matching a condition into an existing table
INSERT INTO ${newTableName} SELECT * FROM ${tableName} where model_id in (${list})

Batch insert
insert into ${tableName} (user_id,project_id,model_id)
values
<foreach collection="list" item="item" index="index" separator=",">
(#{item.userId},#{item.projectId},#{item.modelId})
</foreach>
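A sketch of a matching mapper method for the batch insert above, assuming the entity exposes getUserId/getProjectId/getModelId the way EcologyRelation does:

int insertBatch(@Param("list") List<EcologyRelation> list, @Param("tableName") String tableName);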




Using QueryWrapper<Entity> wrapper = new QueryWrapper<>();

wrapper.eq("label_id", labelId);

  service.someMethod(wrapper)


IPage<HistoryRecordsInfo> page = new Page<>(current, size);

LambdaQueryWrapper<Entity> lambdaQueryWrapper = Wrappers.<Entity>lambdaQuery()
    .eq(Entity::getId, paramId)
    .between(Entity::getTime, getStartOfDay(paramTime), getEndOfDay(paramTime))
    .orderByDesc(Entity::getTime);
baseMapper.selectPage(page, lambdaQueryWrapper);

 QueryWrapper userQueryWrapper = new QueryWrapper();

 Map<String, Object> map = new HashMap<>();
map.put("user_id", userId);
map.put("project_id", projectId);
map.put("model_id", modelId);
userQueryWrapper.allEq(map);

userQueryWrapper.like(true, "name", name);

userQueryWrapper.eq(true, "component_id", componentId);
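To actually run the wrapper, hand it to the MyBatis-Plus mapper (or service); a minimal sketch where userMapper and the entity type are assumptions:

List<EcologyRelation> rows = userMapper.selectList(userQueryWrapper);
long total = userMapper.selectCount(userQueryWrapper);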



public static void main(String[] args) {
String startStr = "2020-08-26";
String endStr = "2022-02-26";
List<String> list = getYearBetweenDate(startStr, endStr);
System.out.println(list);
}
public static List<String> getMonthBetweenDate(String startTime, String endTime){
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM");
// list that collects the months
List<String> list = new ArrayList<String>();
try {
// parse into Date objects
Date startDate = sdf.parse(startTime);
Date endDate = sdf.parse(endTime);

//use Calendar to step through the range
Calendar calendar = Calendar.getInstance();
while (startDate.getTime()<=endDate.getTime()){
// add the current month to the list
list.add(sdf.format(startDate));
// position the calendar on the current date
calendar.setTime(startDate);
//advance by one month
calendar.add(Calendar.MONTH, 1);
// read back the advanced date
startDate=calendar.getTime();
}
} catch (ParseException e) {
e.printStackTrace();
}
return list;
}
public static List<String> getYearBetweenDate(String startTime, String endTime){
SimpleDateFormat sdf = new SimpleDateFormat("yyyy");
// list that collects the years
List<String> list = new ArrayList<String>();
try {
// parse into Date objects
Date startDate = sdf.parse(startTime);
Date endDate = sdf.parse(endTime);

//use Calendar to step through the range
Calendar calendar = Calendar.getInstance();
while (startDate.getTime()<=endDate.getTime()){
// add the current year to the list
list.add(sdf.format(startDate));
// position the calendar on the current date
calendar.setTime(startDate);
//advance by one year
calendar.add(Calendar.YEAR, 1);
// read back the advanced date
startDate=calendar.getTime();
}
} catch (ParseException e) {
e.printStackTrace();
}
return list;
}
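Usage sketch for the month version; with these inputs the list runs from 2021-11 through 2022-02:

List<String> months = getMonthBetweenDate("2021-11", "2022-02");
System.out.println(months); // [2021-11, 2021-12, 2022-01, 2022-02]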


Pagination with a custom mapper query
Page<EcologyUrlType> page = new Page<EcologyUrlType>(current, size);
IPage<EcologyUrlType> getUrlTypePageList = baseMapper.getUrlTypePageList(page, projectId);
 
IPage<EcologyUrlType> getUrlTypePageList(Page<EcologyUrlType> page,@Param("projectId")Integer projectId);
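A minimal service-layer sketch tying the two lines above together; the method name is an assumption, and the mapper XML only needs the business conditions since MyBatis-Plus appends the paging for the Page argument:

public IPage<EcologyUrlType> pageUrlTypes(long current, long size, Integer projectId) {
    Page<EcologyUrlType> page = new Page<>(current, size);
    return baseMapper.getUrlTypePageList(page, projectId);
}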




Commonly used helper methods

//start of a given day
public static Date getStartOfDay(String startDate) {
ZoneId zone = ZoneId.systemDefault();
LocalDate localDate = Instant.ofEpochMilli(Long.valueOf(startDate)).atZone(ZoneOffset.ofHours(8)).toLocalDate();
LocalDateTime startOfDay = LocalDateTime.of(localDate, LocalTime.MIN);
return Date.from(startOfDay.atZone(zone).toInstant());
}
//end of a given day
public static Date getEndOfDay(String endDate) {
ZoneId zone = ZoneId.systemDefault();
LocalDate localDate = Instant.ofEpochMilli(Long.valueOf(endDate)).atZone(ZoneOffset.ofHours(8)).toLocalDate();
LocalDateTime endOfDay = LocalDateTime.of(localDate, LocalTime.MAX);
return Date.from(endOfDay.atZone(zone).toInstant());
}
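Usage sketch: both helpers take a millisecond timestamp passed as a String.

String millis = String.valueOf(System.currentTimeMillis());
Date start = getStartOfDay(millis); // 00:00:00.000 of that day
Date end = getEndOfDay(millis);     // end of that day (LocalTime.MAX)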
//generate a static js resource file
public String generateResourceFile() {
/** start generating the file **/
String dir = langDeskPath;
File d = new File(dir);
if (!d.exists()) {
d.mkdirs();
}
String file = dir + "message.js";
File f = new File(file);
if (f.exists()) {
f.delete();
}
try {
f.createNewFile();
List<String> contentList = new ArrayList<>();
contentList.add("var bdipLang = {\n\tlangType: 'ZH_CN' ,");
contentList.add("\tgetLang: function(id){\n\t\treturn this[this.langType][id];\n\t},");
contentList.add("ZH_CN:{\t\t");
List<CloudLanguageInfo> collect = resourceData.stream().filter(item -> item.getLanguageId() == 7).collect(Collectors.toList());
for (CloudLanguageInfo info : collect) {
contentList.add(new StringBuffer()
.append("\t\"").append(info.getLabelId())
.append("\":\"").append(info.getLabelValue())
.append("\",").toString());
}
contentList.add("\t},\nZH_TW:{\t\t");
List<CloudLanguageInfo> collect1 = resourceData.stream().filter(item -> item.getLanguageId() == 9).collect(Collectors.toList());
for (CloudLanguageInfo info : collect1) {
contentList.add(new StringBuffer()
.append("\t\"").append(info.getLabelId())
.append("\":\"").append(info.getLabelValue())
.append("\",").toString());
}
contentList.add("\t},\nEN:{\t\t");
List<CloudLanguageInfo> collect2 = resourceData.stream().filter(item -> item.getLanguageId() == 8).collect(Collectors.toList());
for (CloudLanguageInfo info : collect2) {
contentList.add(new StringBuffer()
.append("\t\"").append(info.getLabelId())
.append("\":\"").append(info.getLabelValue())
.append("\",").toString());
}
contentList.add("\t},");
contentList.add("}");
FileUtil.writeUtf8Lines(contentList, f);
} catch (IOException e) {
e.printStackTrace();
}
return file;

}

Paging an in-memory list
public class ListPaging{

/**
* Page an in-memory list: given the full data set and a page number, return the slice for that page.
* <BR/>Generic, so the returned list has the same element type as the input list.
* @param f the full list to be paged
* @param pageNo which page (1-based)
* @param dataSize how many items per page
* @return the items on the requested page
*/
public static <F> List<F> datepaging(List<F> f, int pageNo, int dataSize) {
/*
* pageNo of 0 or less ("page 0") would make the code below misbehave, so normalise the inputs first:
* guard against a null list and clamp pageNo and dataSize to at least 1.
*/
//parameter checks
if (f == null) {//if the incoming list is null, use an empty one
f = new ArrayList<F>();
}
if (pageNo <= 0) {
pageNo = 1;
}
if (dataSize <= 0) {
dataSize = 1;
}

//total number of items
int totalItems = f.size();
//the list that receives this page's items
List<F> afterList = new ArrayList<F>();
/*
* Paging with a plain for loop:
* start index = (pageNo - 1) * dataSize
* end index (exclusive) = min(start + dataSize, totalItems)
*/
int start = (pageNo - 1) * dataSize;
int end = Math.min(start + dataSize, totalItems);
for (int i = start; i < end; i++) {
//copy the item into the page result
afterList.add(f.get(i));
}

//return the paged result
return afterList;
}
}
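Usage sketch: page 2 with 10 items per page out of 25 elements returns the elements at indexes 10 to 19.

List<Integer> data = new ArrayList<>();
for (int i = 0; i < 25; i++) {
    data.add(i);
}
List<Integer> pageTwo = ListPaging.datepaging(data, 2, 10);
System.out.println(pageTwo); // prints 10 through 19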




// sort a list of objects ascending by field one
list.stream().sorted(Comparator.comparing(Foo::getFieldOne));
// descending by field one — note the two equivalent spellings
list.stream().sorted(Comparator.comparing(Foo::getFieldOne).reversed());//sort ascending, then reverse the whole order
list.stream().sorted(Comparator.comparing(Foo::getFieldOne, Comparator.reverseOrder()));//sort descending directly
// ascending by field one, then ascending by field two
list.stream().sorted(Comparator.comparing(Foo::getFieldOne).thenComparing(Foo::getFieldTwo));
// descending by field one, ascending by field two — two spellings
// sort ascending by field one, reverse it, then break ties ascending by field two
list.stream().sorted(Comparator.comparing(Foo::getFieldOne).reversed().thenComparing(Foo::getFieldTwo));
// sort descending by field one directly, then ascending by field two
list.stream().sorted(Comparator.comparing(Foo::getFieldOne, Comparator.reverseOrder()).thenComparing(Foo::getFieldTwo));
// descending by field one, descending by field two — two spellings
list.stream().sorted(Comparator.comparing(Foo::getFieldOne).reversed().thenComparing(Foo::getFieldTwo, Comparator.reverseOrder()));
list.stream().sorted(Comparator.comparing(Foo::getFieldOne, Comparator.reverseOrder()).thenComparing(Foo::getFieldTwo, Comparator.reverseOrder()));
// ascending by field one, descending by field two — two spellings
// (ascending field one, reversed, then field two, then the whole thing reversed again)
list.stream().sorted(Comparator.comparing(Foo::getFieldOne).reversed().thenComparing(Foo::getFieldTwo).reversed());
list.stream().sorted(Comparator.comparing(Foo::getFieldOne).thenComparing(Foo::getFieldTwo, Comparator.reverseOrder()));
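A concrete version of the same idea with an assumed User class (getAge/getName): age descending, then name ascending as the tiebreaker.

List<User> sorted = users.stream()
    .sorted(Comparator.comparing(User::getAge, Comparator.reverseOrder())
        .thenComparing(User::getName))
    .collect(Collectors.toList());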




//calling another service's endpoint internally through Feign
@FeignClient(name = "target-service-name", configuration = FeignConfig.class)
public interface OauthClient {

// the remote endpoint
@PostMapping("/oauth/token")
CommonResult postAccessToken(
@ApiIgnore Principal principal,
@RequestParam(name = "client_id") String client_id,
@RequestParam(name = "client_secret") String client_secret,
@RequestParam(name = "grant_type") String grant_type,
@RequestParam(name = "username") String username,
@RequestParam(name = "password") String password,
@RequestParam(name = "refresh_token",required = false) String refresh_token
);
}
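Usage sketch: inject the client and call it like a local bean; the field name and argument values here are assumptions.

@Autowired
private OauthClient oauthClient;

public CommonResult login(String username, String password) {
    return oauthClient.postAccessToken(null, "clientId", "clientSecret", "password", username, password, null);
}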




@Data
public class ProjectContext {
public static final String CONTEXT_KEY = "CONTEXT_KEY";
private static final String DEFAULT_SPAN = "1";
private static final Random RANDOM = new Random();

/**
* unique trace value recorded per request
*/
private String traceId;

/**
* unique marker for each processing step within one request
*/
private String spanId;

/**
* request ip
*/
private String ip;

private static ThreadLocal<ProjectContext> LOCAL = new TransmittableThreadLocal<>();

public static ProjectContext getContext() {
ProjectContext context = LOCAL.get();
if (Objects.isNull(context)) {
context = new ProjectContext();
}

return context;
}

static void nextSpan() {
if (Objects.isNull(getContext())) {
initContext();
return;
}
if (Objects.isNull(getContext().getSpanId())) {
getContext().setSpanId(DEFAULT_SPAN);
return;
}

// current spanId
String span = getContext().getSpanId();
if (span.endsWith(".")) {
span = span.substring(0, span.length() - 1);
}
// find the split position
int p = span.lastIndexOf(".");
String last = span.substring(p + 1);
// the trailing segment that will be incremented
int lastId = Integer.parseInt(last);
// increment it and write it back into spanId
if (p < 0) {
getContext().setSpanId(String.valueOf(lastId + 1));
} else {
getContext().setSpanId(span.substring(0, p) + (lastId + 1));
}
}

/**
* Restore a context passed through from upstream
*
* @param contextString the serialized context string
*/
public static void fromString(String contextString) {
ProjectContext context = GsonUtil.toBean(contextString, ProjectContext.class);

fromContext(context);
}

public static void fromContext(ProjectContext context) {
LOCAL.set(context);

nextSpan();
}

static void setContext(ProjectContext context) {
LOCAL.set(context);
}

public static void initContext(String ip) {
initContext();

ProjectContext context = getContext();

context.setIp(ip);

setContext(context);
}

public static ProjectContext initContext() {
ProjectContext context = new ProjectContext();
context.setTraceId(getTraceForUse());
context.setSpanId(DEFAULT_SPAN);

setContext(context);
return context;
}

public void release() {

}

public static String getTraceForUse() {
return String.valueOf(SnowflakeIdWorker.nextId()).replaceAll("-", "");
}

@Override
public String toString() {
return GsonUtil.toJson(this);
}
}
 


@Slf4j
@ConditionalOnClass(Feign.class)
@AutoConfigureBefore(FeignAutoConfiguration.class)
public class FeignConfig {
@Bean
public RequestInterceptor requestInterceptor() {
return requestTemplate -> {
try {

ProjectContext projectContext = ProjectContext.getContext();
if (Objects.nonNull(projectContext) && !"{}".equals(projectContext.toString())) {
requestTemplate.header("CONTEXT_KEY", new String[]{projectContext.toString()});
}

String feignClientName = requestTemplate.feignTarget().name();
System.out.println(requestTemplate.request().url());
requestTemplate.uri("/" + feignClientName + requestTemplate.request().url());
//the header could also be extended here, e.g. to forward a token
//HttpServletRequest request = attributes.getRequest();
// String token = request.getHeaderNames(CommonC)

} catch (Exception e) {
//TODO: add a service-degradation / fallback path here
e.printStackTrace();
}

};
}

public String getTomcatPort() throws Exception {
MBeanServer beanServer = ManagementFactory.getPlatformMBeanServer();
String port = "";
try {
Set<ObjectName> objectNames = beanServer.queryNames(new ObjectName("*:type=Connector,*"), Query.match(Query.attr("protocol"), Query.value("HTTP/1.1")));
port = objectNames.iterator().next().getKeyProperty("port");
} catch (Exception e) {
System.out.println("正在使用非外部tomcat的方式启动,会有一个查询异常,请忽略");
}
return port;
}
}
//send an SMS verification code

@PostMapping("/sendMsg")
public CommonResult sendMsg(@RequestParam(name="mobile",required = false) String mobile) {
//generate a random 4-digit code
String code = RandomUtil.getFourBitRandom();
Map map = new HashMap();
map.put("code",code);
return service.senMsg(map,mobile);
}
CommonResult senMsg(Map map, String mobile);

@Override
public CommonResult senMsg(Map map, String mobile) {
if(StringUtils.isEmpty(mobile))
return CommonResult.success(false);
int count=baseMapper.getHaveUser(mobile);
//requires the aliyun dysmsapi SDK imports
Config config = new Config()
// your AccessKey ID (placeholder — load real keys from configuration, never hard-code them)
.setAccessKeyId("<yourAccessKeyId>")
// your AccessKey Secret (placeholder)
.setAccessKeySecret("<yourAccessKeySecret>");
// endpoint to call
config.endpoint = "dysmsapi.aliyuncs.com";
Client client = null;
try {
client = new Client(config);
SendSmsRequest request = new SendSmsRequest();
request.setSignName("申昆路停车场项目");//SMS signature name
if(count==0){
request.setTemplateCode("SMS_243265772");//registration template code
}
else{
request.setTemplateCode("SMS_243230822");//login template code
}
request.setPhoneNumbers(mobile);//phone number
//the template params are passed as a JSON string
request.setTemplateParam(JSONObject.toJSONString(map));
SendSmsResponse response = client.sendSms(request);
System.out.println("sent successfully: "+new Gson().toJson(response));
return CommonResult.success(map.get("code"));
} catch (Exception e) {
e.printStackTrace();
return CommonResult.success(false);
}

}

package com.bdip.util;

import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;

/**
* Random number helpers
*
*/
public class RandomUtil {

private static final Random random = new Random();

private static final DecimalFormat fourdf = new DecimalFormat("0000");

private static final DecimalFormat sixdf = new DecimalFormat("000000");

public static String getFourBitRandom() {
return fourdf.format(random.nextInt(10000));
}

public static String getSixBitRandom() {
return sixdf.format(random.nextInt(1000000));
}

/**
* Pick n entries at random from the given list (the random keys below can collide, so treat this as best-effort)
* @param list
* @param n
* @return
*/
public static ArrayList getRandom(List list, int n) {

Random random = new Random();

HashMap<Object, Object> hashMap = new HashMap<Object, Object>();

// generate random keys and map each one to an index of the list
for (int i = 0; i < list.size(); i++) {

int number = random.nextInt(100) + 1;

hashMap.put(number, i);
}

// copy the collected indices into an array
Object[] robjs = hashMap.values().toArray();

ArrayList r = new ArrayList();

// take the first n indices and print the picked elements
for (int i = 0; i < n; i++) {
r.add(list.get((int) robjs[i]));
System.out.print(list.get((int) robjs[i]) + "\t");
}
System.out.print("\n");
return r;
}
}

//parent-child tree structure


package com.bdip.domain.shlq;

import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.Data;

import java.util.Date;
import java.util.Map;

/**
* Base entity class for the schedule (progress) module
*/
@Data
public abstract class DataInfo<T extends Model<?>> extends Model<T>
{

/**主键ID*/
@TableId(type = IdType.ASSIGN_ID)
protected String id;

/** 创建者 */
@TableField(fill = FieldFill.INSERT)
protected String createBy;

/** 创建者名称 */
@TableField(exist = false)
protected String createName;

/** 创建时间 */
@TableField(fill = FieldFill.INSERT)
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
protected Date createTime;

/** 更新者 */
@TableField(fill = FieldFill.UPDATE)
protected String updateBy;

/** 更新者名称 */
@TableField(exist = false)
protected String updateName;

/** 更新时间 */
@TableField(fill = FieldFill.INSERT_UPDATE)
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
protected Date updateTime;

/** 备注 */
@JsonIgnore
protected String remark;

/** 是否删除 */
@JsonIgnore
protected String isdeleted = "0";

/** 搜索值 */
@JsonIgnore
@TableField(exist = false)
protected String search;

/** 请求参数 */
@JsonIgnore
@TableField(exist = false)
protected Map<String, Object> params;

}


package com.bdip.domain.shlq;

import lombok.Data;

import java.util.ArrayList;
import java.util.List;

/**
* Base tree entity class for the schedule (progress) module
*/
@Data
public abstract class TreeInfo<T extends TreeInfo<?>> extends DataInfo<T>
{

/** parent node ID */
protected String parentId;

/** ancestor id list */
protected String ancestors;

/** sort order within the current level */
protected Integer treeSort;

/** leaf-node flag (0 no, 1 yes) */
protected String treeLeaf;

/** depth level of the node */
protected Integer treeLevel;

/** child node list */
protected List<T> childList;

/** project id */
protected String projectId;

public List<T> getChildList() {
//when childList is null, decide by the leaf flag: non-leaf nodes get an empty list back
if(childList==null){
if("0".equals(String.valueOf(treeLeaf))){
return new ArrayList<>();
}
}
return childList;
}
}

package com.bdip.util;

import cn.hutool.core.collection.CollectionUtil;
import com.bdip.domain.shlq.TreeInfo;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;

import javax.validation.constraints.NotNull;
import java.lang.reflect.Field;
import java.util.*;

import static java.util.stream.Collectors.toList;

/**
* Utility for building tree structures
*/
public class TreeUtil<T extends TreeInfo> {

/**
* Convert a flat collection into a tree
*
* @param collection the source collection
* @param clazz element type of the collection
* @return the resulting tree structure
*/
public static <T> Collection<T> toTree(@NotNull Collection<T> collection, @NotNull Class<T> clazz) {
return toTree(collection, null, null, null, clazz);
}

/**
* Convert a flat collection into a tree
*
* @param collection the source collection
* @param id name of the node id field
* @param parent name of the parent id field
* @param children name of the children collection field
* @param clazz element type of the collection
* @return the resulting tree structure
*/
public static <T> Collection<T> toTree(@NotNull Collection<T> collection, String id, String parent, String children, @NotNull Class<T> clazz) {
try {
if (collection == null || collection.isEmpty()) return null;// if the source collection is empty, return an empty tree
if (StringUtils.isEmpty(id)) id = "id"; // default id field name
if (StringUtils.isEmpty(parent)) parent = "parentId"; // default parent field name
if (StringUtils.isEmpty(children)) children = "childList"; // default children field name

// 初始化根节点集合, 支持 Set 和 List
Collection<T> roots;
if (collection.getClass().isAssignableFrom(Set.class)) {
roots = new HashSet<>();
} else {
roots = new ArrayList<>();
}

// 获取 id 字段, 从当前对象或其父类
Field idField;
try {
idField = clazz.getDeclaredField(id);
} catch (NoSuchFieldException e1) {
idField = clazz.getSuperclass().getDeclaredField(id);
}

// 获取 parentId 字段, 从当前对象或其父类
Field parentField;
try {
parentField = clazz.getDeclaredField(parent);
} catch (NoSuchFieldException e1) {
parentField = clazz.getSuperclass().getDeclaredField(parent);
}

// 获取 children 字段, 从当前对象或其父类
Field childrenField;
try {
childrenField = clazz.getDeclaredField(children);
} catch (NoSuchFieldException e1) {
childrenField = clazz.getSuperclass().getDeclaredField(children);
}

// 设置为可访问
idField.setAccessible(true);
parentField.setAccessible(true);
childrenField.setAccessible(true);

// 找出所有的根节点
for (T c : collection) {
Object parentId = parentField.get(c);
if (isRootNode(parentId)) {
roots.add(c);
}
}

// 从目标集合移除所有根节点
collection.removeAll(roots);

// 遍历根节点, 依次添加子节点
for (T root : roots) {
addChild(root, collection, idField, parentField, childrenField);
}

// 关闭可访问
idField.setAccessible(false);
parentField.setAccessible(false);
childrenField.setAccessible(false);

return roots;
} catch (Exception e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}

/**
* 为目标节点添加孩子节点
* @param node 目标节点
* @param collection 目标集合
* @param idField ID 字段
* @param parentField 父节点字段
* @param childrenField 字节点字段
*/
private static <T> void addChild(@NotNull T node, @NotNull Collection<T> collection, @NotNull Field idField, @NotNull Field parentField, @NotNull Field childrenField) throws IllegalAccessException {
Object id = idField.get(node);
Collection<T> children = (Collection<T>) childrenField.get(node);
// 如果子节点的集合为 null, 初始化孩子集合
if (children == null) {
if (collection.getClass().isAssignableFrom(Set.class)) {
children = new HashSet<>();
} else children = new ArrayList<>();
}

for (T t : collection) {
Object o = parentField.get(t);
if (id.equals(o)) {
// add the current element as a child of the target node
children.add(t);
// write the children collection back onto the node: if it started out as null the node does not reference
// the new collection yet and the data would be lost; if the entity initialises childList to an empty
// collection instead of null this step is redundant
childrenField.set(node, children);
// recurse to attach the grandchildren
addChild(t, collection, idField, parentField, childrenField);
}
}
}

/**
* Decide whether a node is a root: a parent id that is null, empty or 0 counts as root.
* @param parentId parent node id
* @return true if the node is a root
*/
private static boolean isRootNode(Object parentId) {
boolean flag = false;
if (parentId == null) {
flag = true;
} else if (parentId instanceof String && (StringUtils.isEmpty(String.valueOf(parentId)) || parentId.equals("0"))) {
flag = true;
} else if (parentId instanceof Integer && Integer.valueOf(0).equals(parentId)) {
flag = true;
}
return flag;
}

/**
* Collect every descendant node of the given parent
* @param trees the full node list
* @param parentTreeId the parent node id
* @param childTrees collector that receives the descendants
*/
public void treeOrgChildren(List<T> trees, String parentTreeId, List<T> childTrees) {
for (T tree: trees) {
if (tree.getParentId() != null && tree.getParentId().equals(parentTreeId)) {
treeOrgChildren(trees, tree.getId(), childTrees);
childTrees.add(tree);
}
}
}

/**
* Recursively collect the ancestor ids of a node
* @param trees
* @param treeId
* @param parentIds
*/
public void treeOrgParent(List<T> trees, String treeId, List<String> parentIds) {
for (T tree : trees) {
if (StringUtils.isEmpty(tree.getParentId())) {
continue;
}
//判断是否有父节点
if (treeId.equals(tree.getId())) {
parentIds.add(tree.getParentId());
treeOrgParent(trees, tree.getParentId(), parentIds);
}
}
}

/**
* Filter the tree by hitIds while keeping the tree structure intact
* @param treeList the tree to search
* @param hitIds the hit ids
* @return
*/
public List<T> searchNode(List<T> treeList,List<String> hitIds) {
List<T> filterTree = Lists.newArrayList();
for (T t : treeList) {
T node = filterTree(t, hitIds);
filterTree.add(node);
}
return filterTree.stream().filter(Objects::nonNull).collect(toList());
}
public List<Map> searchMapNode(List<Map> treeList,List<String> hitIds) {
List<Map> filterTree = Lists.newArrayList();
for (Map t : treeList) {
Map node = filterMapTree(t, hitIds);
filterTree.add(node);
}
return filterTree.stream().filter(Objects::nonNull).collect(toList());
}

/**
* 过滤树
* @param tree
* @param hitIds
* @return
*/
private T filterTree(T tree, List<String> hitIds) {
if(isRemoveNode(tree, hitIds)){
return null;
}
if(tree.getChildList()==null){
return tree;
}
Iterator<T> iterator = (Iterator<T>)tree.getChildList().iterator();
while (iterator.hasNext()){
T child = iterator.next();
deleteNode(child, iterator, hitIds);
}
return tree;
}
private Map filterMapTree(Map tree, List<String> hitIds) {
if(isRemoveMapNode(tree, hitIds)){
return null;
}
if(tree.get("children")==null){
return tree;
}
Iterator<Map> iterator = ((List<Map>) tree.get("children")).iterator();
while (iterator.hasNext()){
Map child = iterator.next();
deleteMapNode(child, iterator, hitIds);
}
return tree;
}

/**
* 删除节点
* @param child
* @param iterator
* @param hitIds
*/
private void deleteNode(T child, Iterator<T> iterator, List<String> hitIds) {
if(isRemoveNode(child, hitIds)){
iterator.remove();
return;
}
List<T> childrenList = (List<T>)child.getChildList();
if(CollectionUtil.isEmpty(childrenList)){
return;
}
Iterator<T> children = childrenList.iterator();
while (children.hasNext()){
T childChild = children.next();
deleteNode(childChild,children, hitIds);
}
}
private void deleteMapNode(Map child, Iterator<Map> iterator, List<String> hitIds) {
if(isRemoveMapNode(child, hitIds)){
iterator.remove();
return;
}
List<Map> childrenList = (List<Map>)child.get("children");
if(CollectionUtil.isEmpty(childrenList)){
return;
}
Iterator<Map> children = childrenList.iterator();
while (children.hasNext()){
Map childChild = children.next();
deleteMapNode(childChild,children, hitIds);
}
}

/**
* Decide whether a node should be removed
* @param root
* @param hitIds the hit nodes
* @return true if it should be removed, false if it must be kept
*/
private boolean isRemoveNode(T root, List<String> hitIds) {
List<T> children = (List<T>) root.getChildList();
// 叶子节点
if(CollectionUtil.isEmpty(children)){
return !hitIds.contains(root.getId());
}
// 子节点
if(hitIds.contains(root.getId())){
return false;
}
boolean bool = true;
for (T child : children) {
if(!isRemoveNode(child,hitIds)){
bool = false;
break;
}
}
return bool;
}
private boolean isRemoveMapNode(Map root, List<String> hitIds) {
List<Map> children = (List<Map>) root.get("children");
// 叶子节点
if(CollectionUtil.isEmpty(children)){
return !hitIds.contains(root.get("id"));
}
// 子节点
if(hitIds.contains(root.get("id"))){
return false;
}
boolean bool = true;
for (Map child : children) {
if(!isRemoveMapNode(child,hitIds)){
bool = false;
break;
}
}
return bool;
}

/**
* 获取排序子节点
* @param children
* @return
*/
private List<T> getSortChildren(List<T> children){
List<T> sort = children.stream().sorted(Comparator.comparing(T::getTreeSort).thenComparing(T::getUpdateTime,Comparator.reverseOrder())).collect(toList());
return sort;
}

/**
* Flatten all child nodes under a given parent into a list
* @param menuList
* @param pid id of the selected parent node
* @return
*/
public static <T extends TreeInfo> List<T> tree2List(List<T> menuList, String pid, Boolean select, List<T> childMenu){

for(T mu: menuList){
Boolean canSelect = select;
//选中的元素收集
if(canSelect){
// mu.setChildren(null);
childMenu.add(mu);
}
if(select || (pid!=null && pid.equals(mu.getId()))){
canSelect = true;
}
//递归遍历下一级
if(mu.getChildList()!=null){
tree2List(mu.getChildList(),pid,canSelect,childMenu);
}
mu.setChildList(null);

}
return childMenu;
}



}
//usage:  List<VideoInfo> reList = (List<VideoInfo>) TreeUtil.toTree(list, VideoInfo.class);

 //convert the list to a json string
// String dataString = JSON.toJSONString(reList, SerializerFeature.WriteMapNullValue);
// String newDataS = dataString.replaceAll("childList", "children");
// //and parse the json string back into a list
// List res = JSONObject.parseArray(newDataS, Map.class);





//compute a file's MD5 digest
public static void main(String[] args) throws IOException {
File sourceFileSaveFile = new File("C:\\Users\\admin\\Documents\\WXWork\\1688856580287494\\Cache\\Video\\2022-06\\snsdyvideodownload.mp4");
try (FileInputStream fileInputStream = new FileInputStream(sourceFileSaveFile)) {
String currFileMd5 = DigestUtils.md5DigestAsHex(fileInputStream);
System.out.println(currFileMd5);
}
}




//file upload (chunked)
@Value("${shlq_file_path}")
public String sourceFileSaveBasePath;

@ApiImplicitParams({
@ApiImplicitParam(name = "name",value = "file name"),
@ApiImplicitParam(name = "format",value = "file suffix",required = true),
@ApiImplicitParam(name = "file",value = "file stream",required = true,dataType = "__file"),
@ApiImplicitParam(name = "sourceFileMd5",value = "md5 of the source file",required = true)
})
@ApiResponses({
@ApiResponse(code = 700,message = "missing parameter"),
@ApiResponse(code = 602,message = "file persistence failed"),
})
@RequestMapping(value = "/addDiscose", method = RequestMethod.POST)
public CommonResult addDiscose(@RequestParam(name="name") String name,
@RequestParam(name="fileSuffix") String fileSuffix,
@RequestParam("sourceFileMd5")String sourceFileMd5,
@RequestParam("fileCount")int fileCount
){
DiscloseInfo info=new DiscloseInfo();
info.setFormat(fileSuffix);
info.setName(name);
info.setIsDelete(0);
info.setUrl(sourceFileSaveBasePath);
CommonResult result =service.addDiscose(info,sourceFileMd5,fileCount);
return result;
}

CommonResult addDiscose(DiscloseInfo info,String md5,int fileCount);


@Value("${shlq_file_path}")
String shlqFilePath;

@Autowired
SHLQVideoChunkUploadImpl chunkImpl;

@Override
public CommonResult addDiscose(DiscloseInfo info,String md5,int fileCount) {
FileUpload fileUpload = new FileUpload();
fileUpload.setFileUploadType(FileUploadTypeEnum.test);

fileUpload.setSourceFileName(info.getName());
fileUpload.setSourceFileSuffix(info.getFormat());
fileUpload.setSourceFileMd5(md5);
fileUpload.setBlockFileTotalCount(fileCount);
fileUpload.setBlockFileSaveBasePath(shlqFilePath);

fileUpload.setAutoMerage(true);
fileUpload.setSourceFileSaveBasePath(shlqFilePath);

CommonResult commonResult = chunkImpl.createUploadTask(fileUpload);
// if(ShareConstants.CODE_SUCCESS == commonResult.getCode()){
// info.setUrl(shlqFilePath +fileUpload.getSourceFileSaveRelativePath());
// int result = baseMapper.insert(info);
// if( 1 != result){
// commonResult.setCode(ShareConstants.CODE_FAIL);
// commonResult.setMessage("操作失败");
// }
// }
return CommonResult.success(shlqFilePath +fileUpload.getSourceFileSaveRelativePath());
}

package com.bdip.service.shlq.impl;

import com.bdip.common.result.CommonResult;
import com.bdip.upload.enums.ServiceEnum;
import com.bdip.upload.model.FileUpload;
import com.bdip.upload.template.ChunkUploadAbstract;
import org.springframework.stereotype.Service;

import java.util.HashSet;
import java.util.Set;

@Service
public class SHLQVideoChunkUploadImpl extends ChunkUploadAbstract {
@Override
public CommonResult createUploadTaskCustom(FileUpload fileUpload) {
System.out.println("upload task created");
return CommonResult.success();
}

@Override
public CommonResult fileUploadSuccess(FileUpload fileUpload) {
System.out.println("chunk uploaded");
return CommonResult.success();
}

//called when a chunk upload fails; no need to override if there is nothing specific to do
@Override
public CommonResult fileUploadError(FileUpload fileUpload) {
return super.fileUploadError(fileUpload);
}

//called when the chunks are merged successfully; no need to override if there is nothing specific to do
@Override
public CommonResult fileBlockMergeSuccess(FileUpload fileUpload) {
return super.fileBlockMergeSuccess(fileUpload);
}

//called when merging the chunks fails; no need to override if there is nothing specific to do
@Override
public CommonResult fileBlockMergeError(FileUpload fileUpload) {
return super.fileBlockMergeError(fileUpload);
}


/**
* Parameters needed before the scheduled upload starts:
* set the services that must be up; the upload only runs once those services have started
* @return
*/
@Override
public FileUpload getUploadInitParam() {
FileUpload fileUpload = new FileUpload();
Set<ServiceEnum> set = new HashSet<>();
set.add(ServiceEnum.bdip_secondaryDevelop_server); //services that must be running
fileUpload.setRunServiceSet(set);
return fileUpload; // return the configured object (returning super.getUploadInitParam() would discard it)
}
@Override
public CommonResult uploadInit(FileUpload fileUpload) {
System.out.println("init after an interrupted upload");
return super.uploadInit(fileUpload);
}
}



package com.bdip.upload.template;
import com.bdip.common.result.CommonResult;
import com.bdip.common.result.EnumResultCode;
import com.bdip.common.utils.FileUtil;
import com.bdip.common.utils.ParamUtil;
import com.bdip.upload.dao.bdipcloud.FileUploadMapper;
import com.bdip.upload.enums.FileUploadTypeEnum;
import com.bdip.upload.model.FileUpload;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.util.DigestUtils;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
/**
* @Author: wyw
* @Date: 2021/10/29 0029-17:34
* Chunked (block) upload
*/
public abstract class ChunkUploadAbstract<T extends FileUpload> extends FileUploadAbstract<T>{
//上传初始化
public CommonResult uploadInit(T t){
splicingLog(t,"uploadInit","默认调用");
return CommonResult.success();
};

//初始化,定时上传需要参数
public T getUploadInitParam(){
return null;
}

//创建上传任务(自定义)
public abstract CommonResult createUploadTaskCustom(T t);

//文件上传失败调用
public CommonResult fileUploadError(T t){
splicingLog(t,"fileUploadError(文件上传失败调用)","默认");
return CommonResult.success();
};

//文件切块合并完成调用
public CommonResult fileBlockMergeSuccess(T t){
splicingLog(t,"fileBlockMergeSuccess(默认切块合并成功调用)","默认调用");
return CommonResult.success();
};

//文件切块合并失败调用
public CommonResult fileBlockMergeError(T t){
splicingLog(t,"fileBlockMergeError(切块合并失败调用)","默认调用");
return CommonResult.success();
};

//压缩包解压前调用
public CommonResult fileUnzipBefore(T t){
splicingLog(t,"fileUnzipBefore(文件解压前调用)","默认调用");
return CommonResult.success();
}

//压缩包解压完成调用
public CommonResult fileUnzipSuccess(T t){
splicingLog(t,"fileUnzipSuccess(文件解压成功调用)","默认调用");
return CommonResult.success();
};

//压缩包解压失败调用
public CommonResult fileUnzipError(T t){
splicingLog(t,"fileUnzipError(文件解压失败调用)","默认调用");
return CommonResult.success();
};

//获取oss上传任务记录,定时上传
public T getOssUploadTask(){
logger.info("getOssUploadTask(获取oss上传任务记录)默认调用");
return null;
};


//创建上传任务
public final CommonResult createUploadTask(T t){
String methodInfo = "createUploadTask(创建上传任务)";
splicingLog(t,methodInfo,"开始");
//必填参数校验
CommonResult commonResult = parameterCheck(t);
int code = commonResult.getCode();
if(code!=200){
return commonResult;
}
try {
//源文件存在校验
commonResult = sourceExistsCheck(1,t);
code = commonResult.getCode();
if(code == 200){
//文件已经存在,并且后续的业务也已经执行完毕
return CommonResult.setCommonResult(EnumResultCode.FILE_EXIST,commonResult.getData());
}else if(code!=-1){
return commonResult;
}

int blockFileTotalCount = t.getBlockFileTotalCount(); //切块总数量
boolean autoMerage = t.getAutoMerage(); //是否自动合并
List<Integer> defectBlockIndexList = getDefectBlockFile(t); //获取缺失切块索引
t.setBlockFileSuccessCount(blockFileTotalCount-defectBlockIndexList.size());
t.setMerageStatus(0);

splicingLog(t,methodInfo,"开始调用接口 createUploadTaskCustom(创建上传任务)");
commonResult = createUploadTaskCustom(t);
code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成 createUploadTaskCustom(创建上传任务)","code:"+code);
if(code!=200){
return commonResult;
}
//切块完整,调用合并
if(defectBlockIndexList.size()==0){
splicingLog(t,methodInfo,"切块完整,直接复用");
//是否开启自动合并
if(autoMerage){
commonResult = fileMerge(t);
if(commonResult.getCode()==200){
return CommonResult.setCommonResult(EnumResultCode.FILE_EXIST,commonResult.getData());
}
}
}
return commonResult;
}catch (Exception e){
splicingLog(t,methodInfo,"创建上传任务抛出异常了");
e.printStackTrace();
return CommonResult.setCommonResult(EnumResultCode.CREATE_UPLOAD_TASK_FAILED);
}
};

/**
* 切块上传
* @param t
* @return
*/
public final CommonResult fileUpload(T t){
String methodInfo = "fileUpload(切块上传)";
splicingLog(t,methodInfo,"开始");
MultipartFile blockFileSteam = t.getFile(); //切块文件
if(ParamUtil.ParamNullOrEmpty(blockFileSteam)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"切块文件未赋值(file)");
}
Integer blockFileIndex = t.getBlockFileIndex();
if(ParamUtil.ParamNullOrEmpty(blockFileIndex)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"切块索引未赋值(blockFileIndex)");
}

//必填参数校验
CommonResult commonResult = parameterCheck(t);
int code = commonResult.getCode();
if(code!=200){
return commonResult;
}
String blockFileSavePath = t.getBlockFileSavePath(); //切块保存绝对路径
int blockFileTotalCount = t.getBlockFileTotalCount(); //切块总数量
boolean autoMerage = t.getAutoMerage(); //是否自动合并
try{
if(t.isSourceExistsCheck()){
//源文件存在校验
commonResult = sourceExistsCheck(2,t);
code = commonResult.getCode();
if(code == 200){
return CommonResult.setCommonResult(EnumResultCode.FILE_EXIST,commonResult.getData());
}else if(code!=-1){
return commonResult;
}
}
//切块绝对路径
if(!FileUtil.fileExists(blockFileSavePath)){
splicingLog(t,methodInfo,"切块文件不存在,开始持久化,持久化路径:"+blockFileSavePath);
//流文件持久化 重试机制3次
boolean b = FileUtil.fileStreamPersistence(blockFileSteam.getInputStream(),blockFileSavePath);
//文件持久化失败
if(!b){
//切块上传失败
splicingLog(t,methodInfo,"开始调用接口,fileBlockUploadError(切块上传失败接口)");
CommonResult c = fileUploadError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockUploadError(切块上传失败接口)","code:"+code);
if(code!=200){
return c;
}
return CommonResult.setCommonResult(EnumResultCode.FILE_UPLOAD_FAILED,"持久化错误");
}
}
int defectBlockCount = getDefectBlockFile(t).size(); //获取缺失切块
t.setBlockFileSuccessCount(blockFileTotalCount-defectBlockCount);
//切块上传完成,需要自己实现
splicingLog(t,methodInfo,"开始调用接口,fileBlockUploadSuccess(切块上传完成)");
commonResult = fileUploadSuccess(t);
commonResult.setData(t.getDefectBlockIndexList());
code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockUploadSuccess(切块上传完成)","code:"+code);
if(code != 200){
//切块上传失败
splicingLog(t,methodInfo,"开始调用接口,fileBlockUploadError(切块上传失败接口)");
CommonResult c = fileUploadError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockUploadError(切块上传失败接口)","code:"+code);
if(code!=200){
return c;
}
return commonResult;
}
//切块完整
if(defectBlockCount==0){
if(autoMerage){
commonResult = fileMerge(t);
return commonResult;
}
}
return commonResult;
}catch (Exception e){
splicingLog(t,methodInfo,"切块上传抛出异");
e.printStackTrace();
try {
splicingLog(t,methodInfo,"开始调用接口,fileBlockUploadError(切块上传失败接口)");
CommonResult c = fileUploadError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockUploadError(切块上传失败接口)","code:"+code);
if(code!=200){
return c;
}
}catch (Exception ee){
ee.printStackTrace();
}
return CommonResult.setCommonResult(EnumResultCode.FILE_UPLOAD_FAILED,"抛出异常");
}
}

/**
* 切块合并
* @param t
* @return
*/
public final CommonResult fileMerge(T t){
String methodInfo = "fileMerge(切块合并)";
splicingLog(t,methodInfo,"开始");
try {
String sourceFileSavePath = t.getSourceFileSavePath(); //源文件保存绝对路径
String sourceFileSuffix = t.getSourceFileSuffix(); //源文件后缀
int blockFileTotalCount = t.getBlockFileTotalCount(); //切块的总数量
boolean merageSuccessBlockDelete = t.getMerageSuccessBlockDelete(); //合并成功后切块文件是否删除
boolean autoUnzip = t.getAutoUnzip(); //是否自动解压
String sourceFileMd5 = t.getSourceFileMd5(); //文件md5
List<String> unzipSupportSuffixList = t.getUnzipSupportSuffixList(); //解压支持压缩包后缀
//文件不存在需要合并
synchronized (sourceFileMd5.intern()){
if(!FileUtil.fileExists(sourceFileSavePath)){
splicingLog(t,methodInfo,"文件不存在,开始合并"+sourceFileSavePath);
Long fileSize = 0L;
for(int i = 0;i<blockFileTotalCount;i++){
t.setBlockFileIndex(i);
String blockFilePath = t.getBlockFileSavePath();
File file = new File(blockFilePath);
if(file.exists()){
boolean blockMerage = blockMerage(blockFilePath,fileSize,sourceFileSavePath);
fileSize += file.length();
//合并失败
if(!blockMerage){
splicingLog(t,methodInfo,"合并失败","开始调用接口,fileBlockMergeError(切块合并失败调用)");
CommonResult c = fileBlockMergeError(t);
int code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockMergeError(切块合并失败调用)","code:"+code);
if(code!=200){
return c;
}
return CommonResult.setCommonResult(EnumResultCode.BLOCK_FILE_MERGE_FAILED);
}
}else{
FileUtil.fileDelete(sourceFileSavePath);
List<Integer> defectBlockFile = getDefectBlockFile(t);
splicingLog(t,methodInfo,"切块合并,","切块缺失,缺失索引"+i+"");
splicingLog(t,methodInfo,"合并失败","开始调用接口,fileBlockMergeError(切块合并失败调用)");
CommonResult c = fileBlockMergeError(t);
int code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockMergeError(切块合并失败调用)","code:"+code);
if(code!=200){
return c;
}
return CommonResult.setCommonResult(EnumResultCode.BLOCK_DEFICIENCY,defectBlockFile);
}
}

String fileMd5 = FileUtil.getFileMd5(sourceFileSavePath);
if(!sourceFileMd5.equals(fileMd5)){
System.out.println("合并失败,合并的文件:"+fileMd5+"====== 前端传输md5"+sourceFileMd5+"====不一致,合并文件删除,合并失败");
FileUtil.fileDelete(sourceFileSavePath);
blockFileDelete(t);
return CommonResult.setCommonResult(EnumResultCode.BLOCK_FILE_MERGE_FAILED);
}
}else{
splicingLog(t,methodInfo,"文件存在,复用"+sourceFileSavePath);
}
}

//是否开启删除切块
if(merageSuccessBlockDelete){
File blockFile = new File(t.getBlockFileSavePath());
File blockFolatFile = blockFile.getParentFile();
splicingLog(t,methodInfo,"文件合并成功,删除切块"+blockFolatFile.getPath(),"关闭方式(merageSuccessBlockDelete==false)");
blockFileDelete(t);
}else{
splicingLog(t,methodInfo,"文件合并成功,未开启,删除切块(merageSuccessBlockDelete==true)");
}

splicingLog(t,methodInfo,"开始调用接口 fileBlockMergeSuccess(切块合并完成)");
CommonResult fileBlockMergeCommonResult = fileBlockMergeSuccess(t);
int code = fileBlockMergeCommonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成 fileBlockMergeSuccess(切块合并完成)","code:"+code);
//合并后调用自己实现接口报错了
if(200!=code){
splicingLog(t,methodInfo,"合并失败","开始调用接口,fileBlockMergeError(切块合并失败调用)");
CommonResult c = fileBlockMergeError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockMergeError(切块合并失败调用)","code:"+code);
if(code!=200){
return c;
}
return fileBlockMergeCommonResult;
}

//是否开启解压
if(autoUnzip&&unzipSupportSuffixList.contains(sourceFileSuffix)){
return fileUnzip(t);
}
return fileBlockMergeCommonResult;
}catch (Exception e){
splicingLog(t,methodInfo,"切块合并抛出异常了");
e.printStackTrace();
try {
splicingLog(t,methodInfo,"合并失败","开始调用接口,fileBlockMergeError(切块合并失败调用)");
CommonResult c = fileBlockMergeError(t);
int code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockMergeError(切块合并失败调用)","code:"+code);
if(code!=200){
return c;
}
}catch (Exception ee){
ee.printStackTrace();
}
return CommonResult.setCommonResult(EnumResultCode.BLOCK_FILE_MERGE_FAILED);
}
}

private void blockFileDelete(T t){
int blockFileTotalCount = t.getBlockFileTotalCount();
for(int i = 0;i<blockFileTotalCount;i++) {
t.setBlockFileIndex(i);
String blockFilePath = t.getBlockFileSavePath();
FileUtil.fileDelete(blockFilePath);
}
File blockFile = new File(t.getBlockFileSavePath());
File blockFolatFile = blockFile.getParentFile();
if(blockFolatFile.listFiles().length==0){
boolean b = FileUtil.fileDelete(blockFolatFile);
System.out.println(blockFolatFile+"路径下的切块删除是否成功-----------------"+b);
}
}

/**
* Check whether the source file already exists
* @param type 1: creating the upload task
* 2: uploading a chunk
* @param t
* @return code 200  the file exists and the follow-up logic has already run
* -1  the source file does not exist
* otherwise an error result
*/
private CommonResult sourceExistsCheck(int type,T t){
String methodInfo = "sourceExistsCheck(源文件存在校验)";
CommonResult commonResult = new CommonResult();
String sourceFileSavePath = t.getSourceFileSavePath(); //源文件保存绝对路径
String sourceFileMd5 = t.getSourceFileMd5(); //源文件md5值
String sourceFileSuffix = t.getSourceFileSuffix(); //源文件后缀
int blockFileTotalCount = t.getBlockFileTotalCount(); //切块总数量
boolean autoUnzip = t.getAutoUnzip(); //是否自动解压
boolean autoMerage = t.getAutoMerage(); //是否自动合并
List<String> unzipSupportSuffixList = t.getUnzipSupportSuffixList(); //压缩包解压支持后缀
int code = 0;
try {
if(autoMerage){
if(FileUtil.fileExists(sourceFileSavePath)){
File sourceFileSaveFile = new File(sourceFileSavePath);
FileInputStream fileInputStream = new FileInputStream(sourceFileSaveFile);
String currFileMd5 = DigestUtils.md5DigestAsHex(fileInputStream);
if(fileInputStream!=null){
fileInputStream.close();
}
//上传文件的md5和本地持久化的文件md5值不一致,识别成受损文件,重新上传
if(!sourceFileMd5.equals(currFileMd5)){
//文件删除
splicingLog(t,methodInfo,"源文件存在,上传文件和本地文件md5不一致,识别成受损文件,删了","删除文件路径:"+sourceFileSavePath);
sourceFileSaveFile.delete();
}else{
splicingLog(t,methodInfo,"源文件存在,直接复用,不需要上传了,文件路径"+sourceFileSavePath);
t.setBlockFileSuccessCount(blockFileTotalCount);
t.setMerageStatus(2);
if(type==1){
//创建上传任务,如果返回状态码不是200我就识别成上传失败了
splicingLog(t,methodInfo,"开始调用接口,createUploadTaskCustom(创建上传任务)");
commonResult = createUploadTaskCustom(t);
code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成,createUploadTaskCustom(创建上传任务)","code:"+code);
if(code!=200){
return commonResult;
}
}else{
//创建上传任务,如果返回状态码不是200我就识别成上传失败了
splicingLog(t,methodInfo,"开始调用接口,fileBlockUploadSuccess(切块上传完成)");
commonResult = fileUploadSuccess(t);
code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockUploadSuccess(切块上传完成)","code:"+code);
if(code!=200){
splicingLog(t,methodInfo,"开始调用接口,fileUploadError(文件上传失败调用)");
CommonResult c = fileUploadError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileUploadError(文件解压失败调用)","code:"+code);
if(code!=200){
return c;
}
return commonResult;
}
}

t.setMerageStatus(2);
splicingLog(t,methodInfo,"开始调用接口,fileBlockMergeSuccess(切块合并完成)");
commonResult = fileBlockMergeSuccess(t);
code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockMergeSuccess(切块合并完成)","code:"+code);
if(code!=200){
splicingLog(t,methodInfo,"开始调用接口,fileBlockMergeError(切块合并失败)");
CommonResult c = fileBlockMergeError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileBlockMergeError(切块合并失败)","code:"+code);
if(code!=200){
return c;
}
return commonResult;
}
//是否开启解压
if(autoUnzip&&unzipSupportSuffixList.contains(sourceFileSuffix)){
return fileUnzip(t);
}
return commonResult;
}
}
}
commonResult.setCode(-1);
}catch (Exception e){
splicingLog(t,"sourceExistsCheck","源文件校验抛出异常了");
e.printStackTrace();
return CommonResult.success(EnumResultCode.FAILED);
}
return commonResult;
}

/**
* Collect the indexes of the missing chunks
* @return the missing chunk indexes
*/
private List<Integer> getDefectBlockFile(T t){
int blockFileTotalCount = t.getBlockFileTotalCount();
List<Integer> defectBlockIndexList = new ArrayList<>();
Integer blockFileIndex = t.getBlockFileIndex();
for(int i = 0;i<blockFileTotalCount;i++){
t.setBlockFileIndex(i);
String blockFileSavePath = t.getBlockFileSavePath();
if(!FileUtil.fileExists(blockFileSavePath)){
defectBlockIndexList.add(i);
}
}
t.setBlockFileIndex(blockFileIndex);
t.setDefectBlockIndexList(defectBlockIndexList);
return defectBlockIndexList;
}

/**
*
* @param targetFilePath the chunk file to write
* @param start offset in the target file where writing starts
* @param resultFilePath the merged target file
*/
private static boolean blockMerage(String targetFilePath,long start,String resultFilePath){
File targetFile = new File(targetFilePath);
File resultFile = new File(resultFilePath);
File parentFile = resultFile.getParentFile();
//文件夹不存在创建
if(!parentFile.exists()){
parentFile.mkdirs();
}
// 使用随机读写流,往目标的指定位置写。
FileInputStream fileIn = null;
FileChannel fileChannel = null;
RandomAccessFile radomFile = null;
try{
radomFile = new RandomAccessFile(resultFile, "rw");
fileIn = new FileInputStream(targetFile);
fileChannel = fileIn.getChannel();
ByteBuffer byteBuffer = ByteBuffer.allocate((int)fileChannel.size());
while ((fileChannel.read(byteBuffer)) > 0) {
}
radomFile.seek(start);
radomFile.write(byteBuffer.array());
}catch(Exception e){
// 失败
e.printStackTrace();
return false;
}finally{
if(fileIn != null){
try {
fileIn.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if(fileChannel != null){
try {
fileChannel.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if(radomFile != null){
try {
radomFile.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return FileUtil.fileExists(resultFilePath);
}


/**
* 文件解压
* @param t
*/
public CommonResult fileUnzip(T t){
String methodInfo = "ChunkUploadAbstract fileUnzip";
String sourceFileSavePath = t.getSourceFileSavePath(); //源文件存放路径
String unzipSaveBasePath = t.getUnzipSaveBasePath(); //压缩包解压存放基本路径
try {

//解压前
splicingLog(t,methodInfo,"开始调用接口,fileUnzipBefore(文件解压前调用)","文件路径,压缩包路径:"+sourceFileSavePath,"解压基本路径:"+unzipSaveBasePath);
CommonResult commonResult = fileUnzipBefore(t);
int code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileUnzipBefore(文件解压前调用)","code:"+code);

if(code!=200){
splicingLog(t,methodInfo,"文件解压失败","开始调用接口,fileUnzipError(文件解压失败调用)");
CommonResult c = fileUnzipError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileUnzipError(文件解压失败调用)","code:"+code);
if(code!=200){
return c;
}
return commonResult;
}

//解压中
commonResult = super.fileUnzip(t);
code = commonResult.getCode();

//解压后
//失败
if(code!=200){
splicingLog(t,methodInfo,"文件解压失败","开始调用接口,fileUnzipError(文件解压失败调用)");
CommonResult c = fileUnzipError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileUnzipError(文件解压失败调用)","code:"+code);
if(code!=200){
return c;
}
return commonResult;
}

//成功
splicingLog(t,methodInfo,"文件解压成功","开始调用接口 fileUnzipSuccess(解压成功调用)");
commonResult = fileUnzipSuccess(t);
code = commonResult.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileUnzipSuccess(解压成功调用)","code:"+code);
if(code!=200){
splicingLog(t,methodInfo,"文件解压失败","开始调用接口,fileUnzipError(文件解压失败调用)");
CommonResult c = fileUnzipError(t);
code = c.getCode();
splicingLog(t,methodInfo,"调用接口完成,fileUnzipError(文件解压失败调用)","code:"+code);
if(code!=200){
return c;
}
}
return commonResult;
}catch (Exception e){
splicingLog(t,methodInfo,"解压抛出异常了");
e.printStackTrace();
return CommonResult.setCommonResult(EnumResultCode.FILE_UNZIP_FAILED,"抛异常了");
}
}

/**
* 设置缺失切块索引
* @param list
*/
public void setDefectBlockIndexList(List<T> list, FileUploadTypeEnum fileUploadTypeEnum){
for(T t :list){
if(!ParamUtil.ParamNullOrEmpty(fileUploadTypeEnum)){
t.setFileUploadType(fileUploadTypeEnum);
}
List<Integer> defectBlockIndexList = new ArrayList<>();
String sourceFileSavePath = t.getSourceFileSavePath();
//源文件存在,应该是更新状态失败了,随便一个切块重新上传即可
if(FileUtil.fileExists(sourceFileSavePath)){
defectBlockIndexList.add(0);
t.setDefectBlockIndexList(defectBlockIndexList);
return;
}

int blockFileTotalCount = t.getBlockFileTotalCount();
Integer blockFileIndex = t.getBlockFileIndex();
for(int i = 0;i<blockFileTotalCount;i++){
t.setBlockFileIndex(i);
String blockFileSavePath = t.getBlockFileSavePath();
if(!FileUtil.fileExists(blockFileSavePath)){
defectBlockIndexList.add(i);
}
}
t.setBlockFileIndex(blockFileIndex);
if(defectBlockIndexList.size()==0){
defectBlockIndexList.add(0);
}
t.setDefectBlockIndexList(defectBlockIndexList);
}
}
public void setDefectBlockIndexList(List<T> list){
setDefectBlockIndexList(list,null);
}
/**
* 参数校验
* @param t
* @return
*/
public CommonResult parameterCheck(T t){
CommonResult commonResult = super.parameterCheck(t);
if(commonResult.getCode()!=200){
return commonResult;
}

String sourceFileMd5 = t.getSourceFileMd5();
if(ParamUtil.ParamNullOrEmpty(sourceFileMd5)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"源文件md5没赋值(sourceFileMd5)");
}

Integer blockFileTotalCount = t.getBlockFileTotalCount(); //切块总数量
if(ParamUtil.ParamNullOrEmpty(blockFileTotalCount)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"切块总数量未赋值(blockFileTotalCount)");
}

String blockFileSaveBasePath = t.getBlockFileSaveBasePath(); //切块文件基本路径
if(ParamUtil.ParamNullOrEmpty(blockFileSaveBasePath)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"切块文件存放基本路径没赋值(blockFileSaveBasePath)");
}

boolean autoMerage = t.getAutoMerage();
if(autoMerage){
splicingLog(t,"","开启了自动合并,关闭方式 autoMerage = false");
String sourceFileSaveBasePath = t.getSourceFileSaveBasePath(); //源文件基本路径
if(ParamUtil.ParamNullOrEmpty(sourceFileSaveBasePath)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"开启了自动合并 autoMerage = ture,合并文件存放基本路径没赋值(sourceFileSaveBasePath)");
}
}else{
splicingLog(t,"","未开启自动合并,开启方法 autoMerage = ture");
}
return CommonResult.success();
}
}



package com.bdip.upload.model;
import com.bdip.common.constant.SpecialSymbolsConstants;
import com.bdip.common.utils.FileUtil;
import com.bdip.common.utils.ParamUtil;
import com.bdip.common.utils.TypeConver;
import com.bdip.upload.enums.FileUploadTypeEnum;
import com.bdip.upload.enums.OssUploadTriggerNodeEnum;
import com.bdip.upload.enums.ServiceEnum;
import com.bdip.upload.initialization.FileUploadInitialization;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.deser.std.EnumDeserializer;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.core.env.Environment;
import org.springframework.web.multipart.MultipartFile;

import java.io.Serializable;
import java.util.*;
/**
* @Author: wyw
* @Date: 2021/2/26 0026-14:21
* The way these properties are populated can be overridden in subclasses
*/
@Data
@ApiModel("文件上传")
public class FileUpload extends Oss implements Serializable {
private static final long serialVersionUID = 6914967805155571562L;

public FileUpload(){
Environment environment = FileUploadInitialization.environment;
if(environment!=null){
if(environment.containsProperty("block_save_base_path")){
this.blockFileSaveBasePath = environment.getProperty("block_save_base_path");
}
if(environment.containsProperty("source_file_save_base_path")){
this.sourceFileSaveBasePath = environment.getProperty("source_file_save_base_path");
}
if(environment.containsProperty("source_file_net_base_path")){
this.sourceFileNetBasePath = environment.getProperty("source_file_net_base_path");
}
if(environment.containsProperty("unzip_save_base_path")){
this.unzipSaveBasePath = environment.getProperty("unzip_save_base_path");
}
}
}

@ApiModelProperty("任务id")
Integer id;

@ApiModelProperty("任务id UUId,对外暴露,唯一")
String taskId;

@ApiModelProperty("上传类型,自己上传要自己添加,这是必填的")
@JsonDeserialize(using = EnumDeserializer.class)
FileUploadTypeEnum fileUploadType;

public FileUploadTypeEnum getFileUploadType() {
return fileUploadType;
}
@ApiModelProperty("进度")
double progress;

@ApiModelProperty("上传状态 0:创建上传任务 1:上传中 2:上传完成 100:上传失败")
int status;

@ApiModelProperty("用户Id")
Integer userId;

@ApiModelProperty("项目Id")
Integer projectId;

@ApiModelProperty("目录树Id")
Integer treeId;

@ApiModelProperty(value = "源文件名称")
String sourceFileName;

@ApiModelProperty(value = "源文件后缀")
String sourceFileSuffix;

@ApiModelProperty("源文件名称+源文件后缀")
String sourceFileNameSuffix;

@ApiModelProperty(value = "源文件MD5值")
String sourceFileMd5;

@ApiModelProperty(value = "源文件大小 字节")
Long sourceFileSize = 0L;

@ApiModelProperty("源文件存放基本路径")
@JsonIgnore
String sourceFileSaveBasePath;

@ApiModelProperty("源文件网络基本路径")
@JsonIgnore
String sourceFileNetBasePath;

@ApiModelProperty("源文件保存绝对路径")
@JsonIgnore
String sourceFileSavePath;

@ApiModelProperty("源文件保存网络路径")
@JsonIgnore
String sourceFileNetPath;

@ApiModelProperty("源文件存放相对路径")
@JsonIgnore
String sourceFileSaveRelativePath;

@ApiModelProperty("源文件上传来源路径")
String sourceFileUploadOriginPath;

@ApiModelProperty("上传文件")
@JsonIgnore
public MultipartFile file;

@ApiModelProperty("切块大小 字节")
Long blockFileSize = 0L;

@ApiModelProperty(value = "切块总块数")
Integer blockFileTotalCount;

@ApiModelProperty(value = "切块成功块数")
Integer blockFileSuccessCount = 0;

@ApiModelProperty("切块文件MD5值")
String blockFileMd5;

@ApiModelProperty("切块下标(从0开始)")
Integer blockFileIndex;

@ApiModelProperty("切块保存基本路径")
@JsonIgnore
String blockFileSaveBasePath;

@ApiModelProperty("切块保存绝对路径")
@JsonIgnore
String blockFileSavePath;

@ApiModelProperty("切块缺失索引")
List<Integer> defectBlockIndexList = new ArrayList<>();

//----------------------------------------------合并 开始-----------------------------------------------------
@ApiModelProperty("切块是否自动合并 true:自动合并 false:不自动合并")
@JsonIgnore
boolean autoMerage = true;

@ApiModelProperty("合并状态 0:未合并 1:合并中 2:合并完成 100:合并失败")
@JsonIgnore
Integer merageStatus = 0;

@ApiModelProperty("合并成功切块删除")
@JsonIgnore
boolean merageSuccessBlockDelete = true;
//----------------------------------------------合并 结束-----------------------------------------------------


//----------------------------------------------解压 开始-----------------------------------------------------
@ApiModelProperty("是否开启自动解压(unzipSupportSuffixList 存在 sourceFileSuffix)")
@JsonIgnore
boolean autoUnzip= false;

@ApiModelProperty("解压存放基本路径")
@JsonIgnore
String unzipSaveBasePath;

@ApiModelProperty("解压状态 0:未解压 1:解压中 2:解压完成 100:解压失败")
Integer unzipStatus;

/**
* Archive types that can be unzipped; only .zip for now, extend as needed
*/
@JsonIgnore
List<String> unzipSupportSuffixList = new ArrayList<String>(Arrays.asList(".zip"));

/**
* unzipType:1
* case 1
* 111.zip
*   222
*     1.txt
* unzips to: basePath/222/1.txt
* case 2
* 111.zip
*   1.txt
* unzips to: basePath/1.txt
*
* unzipType:2
* unzipCustomName: custom folder name
* case 1
* 111.zip
*   222
*     1.txt
* unzips to: basePath/customName/222/1.txt
* case 2
* 111.zip
*   1.txt
* unzips to: basePath/customName/1.txt
*/
@ApiModelProperty("解压类型")
@JsonIgnore
int unzipType = 2;

/**
* 解压自定义名称
*/
@ApiModelProperty("自定义解压名称")
@JsonIgnore
String unzipCustomName;

@ApiModelProperty("解压后的文件路径,基本路径的下一级")
@JsonIgnore
Set<String> unzipAfterFilePathList = new HashSet<>();

//@ApiModelProperty("解压后,获取指定后缀的文件")后续可以根据需求,看是否要实现
//HashMap<String,List<String>> unzipAfterSuffixFilePathMap = new HashMap();

@ApiModelProperty("解压失败,删除解压残留文件")
@JsonIgnore
boolean unzipErrorFolderDelete = true;

@ApiModelProperty("解压失败,删除压缩包")
@JsonIgnore
boolean unzipErrorDelete = true;
//----------------------------------------------解压 结束-----------------------------------------------------

@ApiModelProperty("文件上传到oss触发节点,默认文件上传完成(切块文件是合并完成)")
@JsonIgnore
List<OssUploadTriggerNodeEnum> ossUploadTriggerNodeList = new ArrayList<>();

@ApiModelProperty("是否逻辑删除 0:未删除 1:删除")
Integer delete;

@ApiModelProperty("其他1,可根据自己业务填写值")
String otherOne;

@ApiModelProperty("其他2,可根据自己业务填写值")
String otherTwo;

@ApiModelProperty("其他3,可根据自己业务填写值")
String otherThree;

@ApiModelProperty("其他,可根据自己业务填写值")
Map<Object,Object> otherMap;

@ApiModelProperty("创建时间")
String createDate;

@ApiModelProperty("更新时间")
String updateDate;

@ApiModelProperty("程序启动运行,oss定时上传,只有指定的项目注册到nacos才会执行")
@JsonIgnore
Set<ServiceEnum> runServiceSet = new HashSet<>();

@ApiModelProperty("源文件存在校验,模型上传遗留问题")
@JsonIgnore
boolean sourceExistsCheck = true;

@ApiModelProperty("是否使用默认源文件路径,模型公共上传版本迭代问题")
@JsonIgnore
boolean isDefaultSourceFileSaveBasePath = true;

public void setSourceFileSaveBasePath(String sourceFileSaveBasePath) {
this.sourceFileSaveBasePath = sourceFileSaveBasePath;
isDefaultSourceFileSaveBasePath = false;
}

public void setTaskId(String taskId){
if(ParamUtil.paramIsEmpty(unzipCustomName)){
unzipCustomName = taskId;
}
this.taskId = taskId;
}

public String getSourceFileNameSuffix() {
this.sourceFileNameSuffix = sourceFileName+getSourceFileSuffix();
return sourceFileNameSuffix;
}

private void setSourceFileNameSuffix(String sourceFileNameSuffix) {}

/**
* 这个没什么纠结的,只需要传切块基本路径、md5、切块索引即可
* 切块文件路径 = 切块基本路径 + 源文件md5(作为目录) + 切块名称(sourceFileMd5+"_"+blockFileIndex)
* @return
*/
public String getBlockFileSavePath() {
this.blockFileSavePath = FileUtil.existsTailPath(blockFileSaveBasePath)+FileUtil.existsTailPath(sourceFileMd5)+sourceFileMd5+"_"+blockFileIndex;
return blockFileSavePath;
}
private void setBlockFileSavePath(String blockFileSavePath){}

/**
* 源文件保存路径
* @return 源文件基本路径+源文件名称(源文件md5+源文件后缀)
*/
public String getSourceFileSavePath() {
if(isDefaultSourceFileSaveBasePath){
this.sourceFileSavePath = FileUtil.existsTailPath(sourceFileSaveBasePath)+FileUtil.existsTailPath(TypeConver.toString(fileUploadType))+sourceFileMd5+getSourceFileSuffix();
}else{
this.sourceFileSavePath = FileUtil.existsTailPath(sourceFileSaveBasePath)+sourceFileMd5+getSourceFileSuffix();
}
return sourceFileSavePath;
}

public String getSourceFileNetPath() {
if(isDefaultSourceFileSaveBasePath){
this.sourceFileNetPath = FileUtil.netPathexistsTailPath(sourceFileNetBasePath)+FileUtil.existsTailPath(TypeConver.toString(fileUploadType))+sourceFileMd5+getSourceFileSuffix();
}else{
this.sourceFileNetPath = FileUtil.netPathexistsTailPath(sourceFileNetBasePath)+sourceFileMd5+getSourceFileSuffix();
}
return sourceFileNetPath;
}

public String getSourceFileSuffix() {
return FileUtil.existsHead(sourceFileSuffix, SpecialSymbolsConstants.spot);
}

private void setSourceFileSavePath(String sourceFileSavePath){}

public boolean getAutoUnzip() {
return autoUnzip;
}

public boolean getAutoMerage() {
return autoMerage;
}

public boolean getUnzipErrorFolderDelete() {
return unzipErrorFolderDelete;
}

public boolean getUnzipErrorDelete() {
return unzipErrorDelete;
}

public boolean getMerageSuccessBlockDelete() {
return merageSuccessBlockDelete;
}

public String getSourceFileSaveRelativePath() {
if(ParamUtil.ParamNullOrEmpty(sourceFileSaveBasePath)){
return sourceFileSaveBasePath;
}
return getSourceFileSavePath().replace(sourceFileSaveBasePath,"");
}

private void setSourceFileSaveRelativePath(String sourceFileSaveRelativePath) {
this.sourceFileSaveRelativePath = sourceFileSaveRelativePath;
}

public Map<String,Object> getResult(){
Map<String,Object> map = new HashMap<>();
map.put("taskId",taskId);
map.put("defectBlockIndexList",defectBlockIndexList);
return map;
}

}
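The path scheme above leans entirely on sourceFileMd5 / blockFileMd5 being filled in before a task is created, but the post never shows where those hashes come from. A minimal sketch of how a caller might compute them (my assumption, not part of the framework), using only org.springframework.util.DigestUtils, which the Oss model further down already depends on:

import org.springframework.util.DigestUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

public class Md5Demo {
    // MD5 of the whole source file, streamed so large files are not read into memory
    public static String fileMd5(String filePath) throws IOException {
        try (InputStream in = Files.newInputStream(Paths.get(filePath))) {
            return DigestUtils.md5DigestAsHex(in);
        }
    }

    // MD5 of one chunk, i.e. the byte[] received for a given blockFileIndex
    public static String blockMd5(byte[] blockBytes) {
        return DigestUtils.md5DigestAsHex(blockBytes);
    }

    public static void main(String[] args) throws IOException {
        System.out.println(fileMd5("D:/upload/demo.zip")); // hypothetical path
    }
}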
 
package com.bdip.upload.initialization;
import com.bdip.common.utils.TypeConver;
import com.bdip.redisson.RedissonLock;
import com.bdip.upload.enums.ServiceEnum;
import com.bdip.upload.model.FileUpload;
import com.bdip.upload.model.Oss;
import com.bdip.upload.template.ChunkUploadAbstract;
import com.bdip.upload.util.ServiceStatusUtil;
import org.reflections.Reflections;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import org.springframework.web.context.ServletContextAware;
import org.springframework.web.context.support.WebApplicationContextUtils;
import javax.servlet.ServletContext;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* @Author: wyw
* @Date: 2021/12/1 0001-15:40
* 程序运行执行
*/
@Component
public class FileUploadInitialization<T extends FileUpload> implements ServletContextAware {
Logger logger = LoggerFactory.getLogger(this.getClass());
public static Environment environment;

@Autowired
public void setEnvironment(Environment environment) {
FileUploadInitialization.environment = environment;
}
@Autowired
RedissonLock redissonLock;

//排除的子类
List<String> excludeClass = new ArrayList<>(Arrays.asList("com.bdip.upload.template.demo.ChunkUploadDemo",
"com.bdip.controller.demo.service.ChunkUploadServiceDemo"));

@Override
public void setServletContext(ServletContext servletContext) {
String system = "linux";
//当前系统,只在linux环境才会执行初始化和oss上传任务
String osName = System.getProperty("os.name").toLowerCase();
if(!osName.contains(system)){
logger.info("---------非linux环境,不会执行初始化,oss上传任务----------");
return;
}

String ossUploadStatus = TypeConver.toString(environment.getProperty("oss.upload.status"));
if(!"true".equals(ossUploadStatus)){
logger.info("------oss上传未开启,开启方法(config配置文件中oss.upload.status=true)------"+ossUploadStatus);
return;
}
//服务名称
String springApplicationName = "oss_"+environment.getProperty("spring.application.name");

ApplicationContext applicationContext = WebApplicationContextUtils.getWebApplicationContext(servletContext);
Reflections reflections = new Reflections();
//Set<Class<? extends OssUploadAbstract>> subTypesOf = reflections.getSubTypesOf(OssUploadAbstract.class);
Set<Class<? extends ChunkUploadAbstract>> chunkUploadAbstractSubs = reflections.getSubTypesOf(ChunkUploadAbstract.class);
for(String c:excludeClass){
try {
Class<?> aClass = Class.forName(c);
chunkUploadAbstractSubs.remove(aClass);
}catch (Exception e){
e.printStackTrace();
}
}
ConcurrentHashMap<Class<? extends ChunkUploadAbstract>,String> concurrentHashMap = new ConcurrentHashMap<>();
AtomicBoolean currLock = new AtomicBoolean(false); //是否当前程序持有锁,多线程共享,用AtomicBoolean保证可见性

//获取锁
logger.info("------开始上锁");
new Thread(()->{
while (true){
try {
logger.info("------开始上传上锁------锁名称:"+springApplicationName);
if(currLock.get(0)){
System.out.println("---------------------------------当前程序在运行"+springApplicationName);
}else{
if(redissonLock.isLock(springApplicationName)){
logger.info("------锁------"+springApplicationName+"------已被占用");
}else{
logger.info("------开始上锁------"+springApplicationName);
redissonLock.lock(springApplicationName);
currLock.set(true);
for(Class<? extends ChunkUploadAbstract> cl:chunkUploadAbstractSubs){
concurrentHashMap.put(cl,"");
}
}
}
}catch (Exception e){
e.printStackTrace();
}
try {
logger.info("------锁-----休眠2分钟");
Thread.sleep(1000*60*2);
} catch (Exception e) {
e.printStackTrace();
}
}
}).start();

logger.info("------开始模型中断初始化------线程");
new Thread(()->{
logger.info("开始模型中断初始化------线程-----开始");
while (true){
try {
if(currLock.get()){
if(concurrentHashMap.size()==0){
logger.info("------初始化全部完成");
}else{
for(Class<? extends ChunkUploadAbstract> cl : concurrentHashMap.keySet()){
try {
ChunkUploadAbstract<T> chunkUploadAbstract = applicationContext.getBean(cl);
T t = chunkUploadAbstract.getUploadInitParam();
if(t!=null){
Set<ServiceEnum> startProjectSet = t.getRunServiceSet();
if(!ServiceStatusUtil.isAllRunService(startProjectSet)){
continue;
}
}
//模型中断初始化
chunkUploadAbstract.uploadInit(t);
concurrentHashMap.remove(cl);
}catch (Exception e){
e.printStackTrace();
}
}
}
}
}catch (Exception e){
e.printStackTrace();
}
try {
logger.info("------文件中断初始化-----休眠2分钟");
Thread.sleep(1000*60*2);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();

new Thread(()->{
logger.info("------开始上传到oss-----线程-----开始");
while (true){
try {
if(currLock.get()) {
for (Class<? extends ChunkUploadAbstract> cl : chunkUploadAbstractSubs) {
try {
ChunkUploadAbstract<T> chunkUploadAbstract = applicationContext.getBean(cl);
T t = chunkUploadAbstract.getOssUploadTask();
if (t != null) {
//是否开启oss上传
if (t.getAutoOssUpload()) {
Set<ServiceEnum> startProjectSet = t.getRunServiceSet();
if(ServiceStatusUtil.isAllRunService(startProjectSet)){
//上传文件到oss
Oss oss = chunkUploadAbstract.getOssInfo();
if (oss != null) {
logger.info("------开始oss上传------taskId:" + t.getTaskId() + "------id:" + t.getId());
t.setOss(oss);
chunkUploadAbstract.ossBeakpointContinuation(t);
logger.info("------开始oss上传执行完成");
}
}
}
}

} catch (Exception e) {
e.printStackTrace();
}
}
}
}catch (Exception e){
e.printStackTrace();
}
try {
logger.info("------文件oss上传-----休眠2分钟");
Thread.sleep(1000*60*2);
} catch (InterruptedException e) {
e.printStackTrace();
}
// finally {
// //锁释放
// if(redissonLock.isHeldByCurrentThread(springApplicationName)&&redissonLock.isLock(springApplicationName)){
// logger.info("------开始释放锁------");
// redissonLock.unlock(springApplicationName);
// }
// }
}
}).start();
}

}
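The RedissonLock bean the initializer relies on is not included in the post. Assuming it is a thin wrapper over Redisson's RLock, a sketch exposing the same method names the initializer calls (isLock / lock / isHeldByCurrentThread / unlock) could look like this. Since the initializer calls lock() and never unlocks, Redisson's watchdog keeps renewing the lease for as long as this JVM lives, which is what turns the 2-minute polling loops into a simple leader-election scheme.

import org.redisson.api.RLock;
import org.redisson.api.RedissonClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class RedissonLockSketch {
    @Autowired
    RedissonClient redissonClient;

    // true if any instance (this one or another) currently holds the named lock
    public boolean isLock(String name) {
        return redissonClient.getLock(name).isLocked();
    }

    // acquire without a lease time: held until unlock(), or until the holder's JVM dies
    public void lock(String name) {
        redissonClient.getLock(name).lock();
    }

    public boolean isHeldByCurrentThread(String name) {
        return redissonClient.getLock(name).isHeldByCurrentThread();
    }

    public void unlock(String name) {
        RLock lock = redissonClient.getLock(name);
        if (lock.isHeldByCurrentThread()) {
            lock.unlock();
        }
    }
}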

package com.bdip.upload.template;
import com.bdip.common.result.CommonResult;
import com.bdip.common.result.EnumResultCode;
import com.bdip.common.utils.FileUtil;
import com.bdip.common.utils.ParamUtil;
import com.bdip.common.utils.StringUtil;
import com.bdip.common.utils.TypeConver;
import com.bdip.upload.enums.FileUploadTypeEnum;
import com.bdip.upload.model.FileUpload;
import com.bdip.upload.util.ConfigurationFileUtil;
import org.apache.commons.lang3.StringUtils;
import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* @Author: wyw
* @Date: 2021/12/20 0020-11:56
*/
public abstract class FileUploadAbstract<T extends FileUpload> extends OssUploadAbstract<T> {

//文件上传
public abstract CommonResult fileUpload(T t);

//文件块上传完成调用
public abstract CommonResult fileUploadSuccess(T t);

/**
* 文件解压
* @param t
* @return
*/
public CommonResult fileUnzip(T t){
String methodInfo = "FileUploadAbstract fileUnzip";
String sourceFileSuffix = t.getSourceFileSuffix(); //源文件后缀
String sourceFileSavePath = t.getSourceFileSavePath(); //源文件存放路径
String unzipSaveBasePath = t.getUnzipSaveBasePath(); //压缩包解压存放基本路径
List<String> unzipSupportSuffixList = t.getUnzipSupportSuffixList();
String msg = "文件解压,压缩包路径:"+sourceFileSavePath+",解压基本路径:"+unzipSaveBasePath;
boolean fileUnZip = false;
switch (sourceFileSuffix){
case ".zip":
fileUnZip = unzip(t);
break;
default:
return CommonResult.setCommonResult(EnumResultCode.FILE_UNZIP_FORMAT_NO_SUPPER,"解压文件压缩包格式不支持(unzipSupportSuffixList),支持的格式:"+StringUtils.join(unzipSupportSuffixList));
}
splicingLog(t,methodInfo,msg);

if(!fileUnZip){ //解压失败
//解压失败
if(t.getUnzipErrorDelete()){
splicingLog(t,methodInfo,"删除解压失败的压缩包"+sourceFileSavePath);
FileUtil.fileDelete(sourceFileSavePath);
}

//删除解压失败过程中的残留文件
if(t.getUnzipErrorFolderDelete()){
splicingLog(t,methodInfo,"删除解压失败过程中的残留文件");
Set<String> unzipAfterFilePathList = t.getUnzipAfterFilePathList();
for(String filePath:unzipAfterFilePathList){
FileUtil.fileDelete(filePath);
}
}
t.setUnzipStatus(100);
return CommonResult.setCommonResult(EnumResultCode.FILE_UNZIP_FAILED,msg);
}
return CommonResult.success();
}


public boolean unzip(T t){
String fileRelativePath = null; //文件相对路径 基本路径+当前解压文件夹名称(由解压类型决定)
List<String> unzipPathList = new ArrayList<>(); //解压文件存放路径
String zipPath = t.getSourceFileSavePath(); //压缩包路径
int unzipType = t.getUnzipType(); //解压类型
String unzipSaveBasePath = FileUtil.existsTailPath(t.getUnzipSaveBasePath()); //解压保存路径
Set<String> unzipAfterFilePathList = t.getUnzipAfterFilePathList(); //解压后文件存放路径,基本路径的下一级

boolean isSuccess = true;
OutputStream out = null;
InputStream in = null;
ZipFile zip = null;
try{
//压缩包文件不存在
File zipFile = new File(zipPath);
if(!zipFile.exists()){
isSuccess = false;
}

//类型处理
switch (unzipType){
case 1:
break;
case 2:
unzipSaveBasePath = unzipSaveBasePath+FileUtil.existsTailPath(TypeConver.toString(t.getUnzipCustomName()));
fileRelativePath = unzipSaveBasePath;
break;
default:
return false;
}

File pathFile = new File(unzipSaveBasePath);
if (!pathFile.exists()) {
pathFile.mkdirs();
}

zip = new ZipFile(zipFile, Charset.forName("GBK"));
switch (unzipType){
case 1:
Map<String, String> zipMap = FileUtil.zipStructure(zip);
if("true".equals(zipMap.get("isFolder"))){
fileRelativePath = unzipSaveBasePath + FileUtil.existsTailPath(TypeConver.toString(zipMap.get("nextLevelName")));
}
break;
}
for (Enumeration entries = zip.entries(); entries.hasMoreElements();) {
ZipEntry entry = (ZipEntry) entries.nextElement();
String zipEntryName = entry.getName();
in = zip.getInputStream(entry);
String outPath= "";

//类型处理
if(unzipType==1||unzipType==2){
outPath = (FileUtil.existsTailPath(unzipSaveBasePath) + zipEntryName).replaceAll("\\\\", "/");
}

// 判断路径是否存在,不存在则创建文件路径
int index = outPath.lastIndexOf('/');
index = (index>0?index:outPath.length());
String currPath = outPath.substring(0,index);
File file = new File(currPath);
if (!file.exists()) {
file.mkdirs();
}
unzipPathList.add(outPath);
// 判断文件全路径是否为文件夹,如果是,上面mkdirs已经创建过目录,不需要再写文件
if (new File(outPath).isDirectory()) {
continue;
}
// 输出文件路径信息
out = new FileOutputStream(outPath);
byte[] buf1 = new byte[1024];
int len;
while ((len = in.read(buf1)) > 0) {
out.write(buf1, 0, len);
}
//必须要释放要不然不能删除
if (in != null) {
in.close();
}
if (out != null) {
out.close();
}
}
}catch(Exception e){
isSuccess =false;
e.printStackTrace();
}finally {
try {
//必须要释放要不然不能删除
if (in != null) {
in.close();
}
} catch (IOException e) {
isSuccess =false;
e.printStackTrace();
}

try {
if (out != null) {
out.close();
}
}catch (IOException e) {
isSuccess =false;
e.printStackTrace();
}

try {
if(zip != null) {
zip.close();
}
}catch (IOException e) {
isSuccess =false;
e.printStackTrace();
}
}
if(fileRelativePath==null){
unzipAfterFilePathList.addAll(unzipPathList);
}else{
unzipAfterFilePathList.add(fileRelativePath);
}
return isSuccess;
}


public String splicingLog(T t,String methodName,String... logs){
boolean printLog = t.getPrintLog();
boolean b = ConfigurationFileUtil.getFileUploadPrintLog(environment);
if(printLog&&b){
String logStr = "----"+methodName+"----id:"+t.getId()+"----taskId:"+t.getTaskId()+"----fileUploadType:"+t.getFileUploadType();
logger.info(logStr);

String logStr2 = "";
for(String log:logs){
logStr2 += "----"+log+"----";
}
logger.info(logStr2);
}
return "";
}

/**
* 参数校验
* @param t
* @return
*/
public CommonResult parameterCheck(T t){
String taskId = t.getTaskId(); //上传任务编号
if(ParamUtil.ParamNullOrEmpty(taskId)){
taskId = StringUtil.getUUID();
t.setTaskId(taskId);
}

FileUploadTypeEnum fileUploadTypeEnum = t.getFileUploadType();
if(ParamUtil.ParamNullOrEmpty(fileUploadTypeEnum)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"上传类型没赋值(fileUploadType)");
}

// String sourceFileName = t.getSourceFileName();
// if(ParamUtil.ParamNullOrEmpty(sourceFileName)){
// return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"源文件名称没赋值(sourceFileName)");
// }

String sourceFileSuffix = t.getSourceFileSuffix();
if(ParamUtil.ParamNullOrEmpty(sourceFileSuffix)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"源文件后缀没赋值(sourceFileSuffix)");
}

List<String> unzipSupportSuffixList = t.getUnzipSupportSuffixList();
boolean autoUnzip = t.getAutoUnzip();
String unzipSaveBasePath = t.getUnzipSaveBasePath(); //解压文件存放路径
if(autoUnzip){
if(unzipSupportSuffixList.contains(sourceFileSuffix)){
if(ParamUtil.ParamNullOrEmpty(unzipSaveBasePath)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"autoUnzip = ture,开启了自动解压,解压后文件存放基本路径没赋值(unzipSaveBasePath)");
}else{
splicingLog(t,"","开启了自动解压,关闭方式,autoUnzip = false");
}
}else{
return CommonResult.setCommonResult(EnumResultCode.FILE_UNZIP_FORMAT_NO_SUPPER,"解压文件压缩包格式不支持(unzipSupportSuffixList),支持的格式:"+StringUtils.join(unzipSupportSuffixList));
}
}else{
splicingLog(t,"未开启自动解压,开启方法为(autoUnzip=true 且 unzipSupportSuffixList 存在 sourceFileSuffix)");
}
return CommonResult.success();
}


/*public final CommonResult fileUnzip(T t){
String methodInfo = "fileUnzip(文件解压)";
try {
splicingLog(t,methodInfo,"开始");

String sourceFileSuffix = t.getSourceFileSuffix(); //源文件后缀
String unzipSaveBasePath = t.getUnzipSaveBasePath(); //压缩包解压存放基本路径
String sourceFileSavePath = t.getSourceFileSavePath(); //源文件存放路径
boolean ossUpload = t.getOssUpload(); //是否开启oss上传
List<OssUploadTriggerNodeEnum> ossUploadTriggerNodeList = t.getOssUploadTriggerNodeList(); //oss上传触发节点

t.setUnzipStatus(1);
splicingLog(t,methodInfo,"开始调用接口,fileUnzipBefore(文件解压前调用)","文件路径,压缩包路径:"+sourceFileSavePath,"解压基本路径:"+unzipSaveBasePath);
CommonResult commonResult = fileUnzipBefore(t);
splicingLog(t,methodInfo,"调用接口完成,fileUnzipBefore(文件解压前调用)");
if(commonResult.getCode()!=200){
return commonResult;
}

boolean fileUnZip = false;
switch (sourceFileSuffix){
case ".zip":
fileUnZip = unzip(t);
}
//文件解压成功
if(fileUnZip){
t.setUnzipStatus(2);
splicingLog(t,methodInfo,"文件解压成功","开始调用接口 fileUnzipSuccess(解压成功调用)");
commonResult = fileUnzipSuccess(t);
splicingLog(t,methodInfo,"调用接口完成,fileUnzipSuccess(解压成功调用)");
if(commonResult.getCode()!=200){
splicingLog("");
return commonResult;
}

//是否开启上传到oss
if(ossUpload&&ossUploadTriggerNodeList.contains(OssUploadTriggerNodeEnum.fileUnzip)){
splicingLog(t,methodInfo,"开始上传oss");
Set<String> unzipAfterFilePathList = t.getUnzipAfterFilePathList();
t.setOssLocalFilePathSet(unzipAfterFilePathList);
commonResult = ossUpload(t);
if(commonResult.getCode()!=200){
return commonResult;
}
}else{
splicingLog(t,methodInfo,"未开启oss上传,不会调用oss上传有关的接口,开启方式(ossUpload=true,ossUploadTriggerNodeList存在OssUploadTriggerNodeEnum.fileUnzip)");
}

return commonResult;
}else{
splicingLog(t,methodInfo,"文件解压失败,压缩包路径:"+sourceFileSavePath,"解压路径:"+unzipSaveBasePath);
//解压失败
if(t.getUnzipErrorDelete()){
splicingLog(t,methodInfo,"删除解压失败的压缩包");
FileUtil.fileDelete(sourceFileSavePath);
}

//删除解压失败过程中的残留文件
if(t.getUnzipErrorFolderDelete()){
splicingLog(t,methodInfo,"删除解压失败过程中的残留文件");
Set<String> unzipAfterFilePathList = t.getUnzipAfterFilePathList();
for(String filePath:unzipAfterFilePathList){
FileUtil.fileDelete(filePath);
}
}
t.setUnzipStatus(100);
commonResult = fileUnzipError(t);
if(commonResult.getCode()!=200){
splicingLog("fileUnzipError,返回code不是200");
return commonResult;
}
return CommonResult.setCommonResult(EnumResultCode.FILE_UNZIP_FAILED);
}
}catch (Exception e){
splicingLog(t,methodInfo,"文件解压失败");
e.printStackTrace();
try {
t.setUnzipStatus(100);
fileUnzipError(t);
}catch (Exception ee){
ee.printStackTrace();
}
return CommonResult.setCommonResult(EnumResultCode.FILE_UNZIP_FAILED);
}
}*/
}
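A note on unzip() above: it writes each entry to 基本路径 + entry.getName() without validating the entry name, so an archive containing entries such as "../evil.txt" can escape the target directory (the classic zip-slip issue), and the streams are closed by hand. A condensed alternative under the same "extract everything below a base directory" contract is sketched below, with try-with-resources and a normalization check; it is an illustration, not the framework's code.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

public class UnzipSketch {
    // Extracts zipPath under baseDir; returns false on any failure.
    public static boolean unzip(String zipPath, String baseDir) {
        Path base = Paths.get(baseDir).toAbsolutePath().normalize();
        try (ZipFile zip = new ZipFile(zipPath, Charset.forName("GBK"))) {
            Files.createDirectories(base);
            Enumeration<? extends ZipEntry> entries = zip.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                Path target = base.resolve(entry.getName()).normalize();
                // zip-slip guard: refuse entries that resolve outside the base directory
                if (!target.startsWith(base)) {
                    return false;
                }
                if (entry.isDirectory()) {
                    Files.createDirectories(target);
                    continue;
                }
                Files.createDirectories(target.getParent());
                try (InputStream in = zip.getInputStream(entry)) {
                    Files.copy(in, target); // fails if the file already exists
                }
            }
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }
}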
package com.bdip.upload.template;
import com.bdip.common.result.CommonResult;
import com.bdip.common.result.EnumResultCode;
import com.bdip.common.utils.FileUtil;
import com.bdip.common.utils.ParamUtil;
import com.bdip.upload.model.Oss;
import com.bdip.upload.util.ConfigurationFileUtil;
import com.bdip.upload.util.OssUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @Author: wyw
* @Date: 2021/12/8 0008-16:37
*/
public abstract class OssUploadAbstract<O extends Oss>{
@Autowired
Environment environment;

public Logger logger = LoggerFactory.getLogger(OssUploadAbstract.class);

//文件上传到oss前调用
public CommonResult fileOssUploadBefore(O o){
splicingLog(o,"fileOssUploadBefore","oss上传前默认调用");
return CommonResult.success();
};

//文件上传到oss成功调用
public CommonResult fileOssUploadSuccess(O o){
splicingLog(o,"fileOssUploadSuccess","oss上传成功默认调用");
return CommonResult.success();
};

//文件上传到oss失败调用
public CommonResult fileOssUploadError(O o){
splicingLog(o,"fileOssUploadError","oss上传失败默认调用");
return CommonResult.success();
};

/**
* 获取oss信息 只需要 endpoint(区域)、accessKeyId(访问密钥)、accessKeySecret(访问密钥密码)、bucketName(桶名称)、ossLocalRecordBasePath(断点续传基本路径,断点续传需要)
* 如果不使用默认的,可以在子类重写
* @return
*/
public Oss getOssInfo(){
return ConfigurationFileUtil.getOss(environment);
};


private CommonResult ossUpload(O o,int type,String methodName){
try {
//获取oss信息
o.setOss(getOssInfo());
//参数校验
CommonResult commonResult = ossParameterCheck(o);
if(commonResult.getCode()!=200){
CommonResult c = isOssUploadError(methodName,o,commonResult);
if(c!=null){
return c;
}
return commonResult;
}

Set<String> ossLocalFilePathSet = o.getOssLocalFilePathSet();
List<String> ossUploadExcludeFilePathList = o.getOssUploadExcludeFilePathList(); //排除文件信息
splicingLog(o,methodName,"开始调用接口,oss上传前调用(fileOssUploadBefore)");
commonResult = fileOssUploadBefore(o);
int code = commonResult.getCode();
splicingLog(o,methodName,"接口调用完成,oss上传前调用(fileOssUploadBefore)","code:"+code);
if(code!=200){
splicingLog(o,methodName,"接口调用异常了,oss上传前调用(fileOssUploadBefore)");
CommonResult c = isOssUploadError(methodName,o,commonResult);
if(c!=null){
return c;
}
return commonResult;
}

for(String path:ossLocalFilePathSet){
File file = new File(path);
if(!file.exists()){
String errorInfo = "oss上传文件(ossLocalFilePathSet)不存在:"+path;
splicingLog(o,methodName,errorInfo);
commonResult = CommonResult.setCommonResult(EnumResultCode.OSS_UPLOAD_FILE_NO_EXISTS,errorInfo);
CommonResult c = isOssUploadError(methodName,o,commonResult);
if(c!=null){
return c;
}
return commonResult;
}

if(file.isDirectory()){
Set<String> uploadFilePathSet = new HashSet<>();
//获取排除后的文件
FileUtil.getExcludeSubFilePath(path,ossUploadExcludeFilePathList,uploadFilePathSet);
splicingLog(o,methodName,"排除"+ossUploadExcludeFilePathList+"后","剩余文件个数"+uploadFilePathSet.size());
for(String uploadFilePath:uploadFilePathSet){
o.setOssLocalFilePath(uploadFilePath);
if(type==1){
//断点续传
commonResult = OssUtil.blockUpload(o);
}else{
//整体文件上传
commonResult = OssUtil.fileUpload(o);
}
}
} else {
o.setOssLocalFilePath(file.getPath());
if(type==1){
//断点续传
commonResult = OssUtil.blockUpload(o);
}else{
//整体文件上传
commonResult = OssUtil.fileUpload(o);
}
}

if(commonResult.getCode()!=200){
//oss上传失败
CommonResult c = isOssUploadError(methodName,o,commonResult);
if(c!=null){
return c;
}
return commonResult;
}
}

//oss上传完成
splicingLog(o,methodName,"开始调用接口,oss上传完成调用(fileOssUploadSuccess)");
commonResult = fileOssUploadSuccess(o);
code = commonResult.getCode();
splicingLog(o,methodName,"调用接口完成,oss上传完成调用(fileOssUploadSuccess)","code:"+code);
if(code!=200){
CommonResult c = isOssUploadError(methodName,o,commonResult);
if(c!=null){
return c;
}
return commonResult;
}
return commonResult;
}catch (Exception e){
splicingLog(o,methodName,"抛出异常了","开始调用fileOssUploadError(oss上传失败)");
e.printStackTrace();
try {
CommonResult c = isOssUploadError(methodName,o,CommonResult.setCommonResult(EnumResultCode.OSS_UPLOAD_FAILED));
if(c!=null){
return c;
}
}catch (Exception ee){
ee.printStackTrace();
}
return CommonResult.setCommonResult(EnumResultCode.OSS_UPLOAD_FAILED);
}
}
/**
* oss断点续传
* oss上传文件,单个文件或文件夹
* @return
*/
public final CommonResult ossBeakpointContinuation(O o){
String methodName = "ossBeakpointContinuation(oss断点上传)";
//oss参数校验
return ossUpload(o,1,methodName);
}

/**
* oss 上传
* @return
*/
public final CommonResult ossUpload(O o){
String methodName = "OssUpload(oss完整上传)";
return ossUpload(o,2,methodName);
}

public String splicingLog(O o,String methodName,String... logs){
boolean printLog = o.getPrintLog();
boolean b = ConfigurationFileUtil.getFileUploadPrintLog(environment);
if(printLog&&b){
String logStr = "----"+methodName;
for(String log:logs){
logStr += "----"+log+"----";
}
logger.info(logStr);
}
return "";
}
public CommonResult ossParameterCheck(O o){
String endpoint = o.getEndpoint();
if(ParamUtil.ParamNullOrEmpty(endpoint)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"区域没赋值(endpoint)");
}

String accessKeyId = o.getAccessKeyId();
if(ParamUtil.ParamNullOrEmpty(accessKeyId)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"访问密钥没赋值(accessKeyId)");
}

String accessKeySecret = o.getAccessKeySecret();
if(ParamUtil.ParamNullOrEmpty(accessKeySecret)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"访问密钥密码没赋值(accessKeySecret)");
}

String bucketName = o.getBucketName();
if(ParamUtil.ParamNullOrEmpty(bucketName)){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"桶名称没赋值(bucketName)");
}

//String ossLocalFilePath = t.getOssLocalFilePath();
Set<String> ossLocalFilePathSet = o.getOssLocalFilePathSet();
if(ParamUtil.ParamNullOrEmpty(ossLocalFilePathSet)||ossLocalFilePathSet.size()==0){
return CommonResult.setCommonResult(EnumResultCode.PARAM_NULL,"本地文件路径没赋值(ossLocalFilePathSet)");
}
return CommonResult.success();
}

private CommonResult isOssUploadError(String methodName,O o,CommonResult errorCommonResult){
o.setErrorCommonResult(errorCommonResult);
splicingLog(o,methodName,"开始调用接口,fileOssUploadError(oss上传失败调用),code:"+errorCommonResult.getCode());
CommonResult commonResult = fileOssUploadError(o);
int code = commonResult.getCode();
splicingLog(o,methodName,"调用接口完成,fileOssUploadError(oss上传失败调用)","code:"+code);
if(code!=200){
splicingLog(o,methodName,"调用接口异常,fileOssUploadError(oss上传失败调用)");
return commonResult;
}
return null;
}
}
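OssUtil.fileUpload and OssUtil.blockUpload are referenced above but not shown in the post. For orientation only, a whole-file upload written against the Aliyun OSS Java SDK might look roughly like the sketch below, reusing the getters of the Oss model that follows and the CommonResult codes used elsewhere in the post; everything else is an assumption. The resumable (断点续传) variant would normally go through the SDK's UploadFileRequest with checkpointing enabled rather than putObject.

import com.aliyun.oss.OSS;
import com.aliyun.oss.OSSClientBuilder;
import com.bdip.common.result.CommonResult;
import com.bdip.common.result.EnumResultCode;
import com.bdip.upload.model.Oss;
import java.io.File;

public class OssUtilSketch {
    // Whole-file upload: getOssSavePath() becomes the object key inside the bucket.
    public static CommonResult fileUpload(Oss o) {
        OSS ossClient = new OSSClientBuilder()
                .build(o.getEndpoint(), o.getAccessKeyId(), o.getAccessKeySecret());
        try {
            ossClient.putObject(o.getBucketName(), o.getOssSavePath(), new File(o.getOssLocalFilePath()));
            return CommonResult.success();
        } catch (Exception e) {
            e.printStackTrace();
            return CommonResult.setCommonResult(EnumResultCode.OSS_UPLOAD_FAILED);
        } finally {
            ossClient.shutdown();
        }
    }
}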
package com.bdip.upload.model;
import com.bdip.common.result.CommonResult;
import com.bdip.common.utils.FileUtil;
import com.bdip.common.utils.ParamUtil;
import com.bdip.upload.enums.OssUploadTriggerNodeEnum;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import org.springframework.util.DigestUtils;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
/**
* @Author: wyw
* @Date: 2021/12/1 0001-10:36
*/
@Data
@ApiModel("oss")
public class Oss {
public Oss(){}
public Oss(String endpoint,String accessKeyId,String accessKeySecret,String bucketName){
this.endpoint = endpoint;
this.accessKeyId = accessKeyId;
this.accessKeySecret = accessKeySecret;
this.bucketName = bucketName;
}
@ApiModelProperty("区域")
String endpoint;

@ApiModelProperty("访问密钥")
String accessKeyId;

@ApiModelProperty("访问密钥密码")
String accessKeySecret;

@ApiModelProperty("桶名称")
String bucketName;

@ApiModelProperty("oss保存路径")
String ossSavePath;

@ApiModelProperty("本地文件路径,文件夹:会上传文件夹下所有文件")
String ossLocalFilePath;

@ApiModelProperty("本地文件路径,文件夹:会上传文件夹下所有文件")
Set<String> ossLocalFilePathSet = new HashSet<>();

@ApiModelProperty("本地文件md5")
String ossLocalFileMd5;

@ApiModelProperty("移除本地文件上传到oss的部分路径,其他的作为oss的路径")
//如果 ossLocalFilePath = /data/bdip/static/model/11111/3d.svf
// ossRemoveLocalBasePath = /data/bdip/static/
// oss上文件保存路径 = model/11111/3d.svf
String ossRemoveLocalBasePath;

@ApiModelProperty("本地断点续传保存路径")
String ossLocalRecordPath;

@ApiModelProperty("本地断点续传基本路径")
String ossLocalRecordBasePath;

@ApiModelProperty("oss上传排除文件 名称:排除所有名称相同的文件 后缀:排除所有后缀相同的文件 全名称:排除全名称相同的文件")
List<String> ossUploadExcludeFilePathList = new ArrayList<>(Arrays.asList(""));

@ApiModelProperty("是否上传oss")
boolean autoOssUpload = false;

@ApiModelProperty("oss上传状态 0:未上传 1:上传中 2:上传完成 100:上传失败")
Integer ossStatus;

@ApiModelProperty("同步还是异步 true:同步 false:异步 暂时不考虑")
boolean synchronous = true;

//@ApiModelProperty("文件上传进度 暂时不考虑")
//Map<String,Integer> fileUploadSpeedMap;

@ApiModelProperty("文件上传进度key 暂时不考虑")
String fileUploadSpeedKey;

@ApiModelProperty("从STS服务获取的安全令牌(SecurityToken)")
String securityToken;

@ApiModelProperty("角色ARN")
String roleArn;

@ApiModelProperty("角色会话名称")
String roleSessionName;

@ApiModelProperty("有效时间秒")
Long durationSeconds;

@ApiModelProperty("到期时间时间戳")
Long expirationTimeStamp;

@ApiModelProperty("到期时间")
String expirationDate;

@ApiModelProperty("地域")
String region;

@ApiModelProperty("oss-地域(前端使用)")
String oSSregion;

@ApiModelProperty("错误信息")
CommonResult errorCommonResult;

@ApiModelProperty("是否打印日志")
boolean printLog = true;

public boolean getAutoOssUpload() {
return autoOssUpload;
}

public void setOssLocalFilePath(String ossLocalFilePath) {
this.ossLocalFilePath = ossLocalFilePath;
}

public String getOssSavePath() {
if(ParamUtil.ParamNullOrEmpty(ossRemoveLocalBasePath)){
if(ParamUtil.ParamNullOrEmpty(ossLocalFilePath)){
return ossLocalFilePath;
}
return FileUtil.noExistsHeadPath(ossLocalFilePath).replaceAll("\\\\",Matcher.quoteReplacement("/"));
}
return FileUtil.noExistsHeadPath(ossLocalFilePath.replace(ossRemoveLocalBasePath,"")).replaceAll("\\\\",Matcher.quoteReplacement("/"));
}

public String getOssLocalRecordPath() {
if(ParamUtil.ParamNullOrEmpty(ossLocalRecordBasePath)){
return null;
}
try (FileInputStream fileInputStream = new FileInputStream(ossLocalFilePath)) {
ossLocalFileMd5 = FileUtil.getFileMd5(fileInputStream);
return FileUtil.existsTailPath(ossLocalRecordBasePath) + DigestUtils.md5DigestAsHex((FileUtil.existsTailPath(ossLocalFileMd5)+getOssSavePath()).getBytes())+".record";
} catch (IOException e) {
e.printStackTrace();
return null;
}
}

private void setOssLocalRecordPath(String ossLocalRecordPath){}

public void setOss(Oss oss){
this.endpoint = oss.getEndpoint();
this.accessKeyId = oss.getAccessKeyId();
this.accessKeySecret = oss.getAccessKeySecret();
this.bucketName = oss.getBucketName();
this.ossLocalRecordBasePath = oss.getOssLocalRecordBasePath();
}

public String getoSSregion() {
return "oss-"+region;
}
public boolean getPrintLog(){
return this.printLog;
}

public void setPrintLog(boolean printLog) {
this.printLog = printLog;
}
}
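The STS fields on this model (securityToken, roleArn, roleSessionName, durationSeconds, expirationDate) suggest temporary credentials are also handed to the front end, but the post does not show how they are obtained. Assuming Aliyun's STS SDK (aliyun-java-sdk-sts), the usual AssumeRole call would populate them roughly as follows; treat it as a sketch, not the project's code.

import com.aliyuncs.DefaultAcsClient;
import com.aliyuncs.IAcsClient;
import com.aliyuncs.auth.sts.AssumeRoleRequest;
import com.aliyuncs.auth.sts.AssumeRoleResponse;
import com.aliyuncs.exceptions.ClientException;
import com.aliyuncs.profile.DefaultProfile;

public class StsSketch {
    // Returns an Oss holder filled with temporary credentials for the front end.
    public static Oss assumeRole(String regionId, String accessKeyId, String accessKeySecret,
                                 String roleArn, String roleSessionName, Long durationSeconds) throws ClientException {
        DefaultProfile profile = DefaultProfile.getProfile(regionId, accessKeyId, accessKeySecret);
        IAcsClient client = new DefaultAcsClient(profile);

        AssumeRoleRequest request = new AssumeRoleRequest();
        request.setRoleArn(roleArn);
        request.setRoleSessionName(roleSessionName);
        request.setDurationSeconds(durationSeconds);

        AssumeRoleResponse.Credentials credentials = client.getAcsResponse(request).getCredentials();

        Oss oss = new Oss();
        oss.setRegion(regionId);
        oss.setAccessKeyId(credentials.getAccessKeyId());
        oss.setAccessKeySecret(credentials.getAccessKeySecret());
        oss.setSecurityToken(credentials.getSecurityToken());
        oss.setExpirationDate(credentials.getExpiration());
        return oss;
    }
}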


 
 
