package com.xxl.job.admin.controller;

import com.xxl.job.admin.core.exception.XxlJobException;
import com.xxl.job.admin.core.model.XxlJobGroup;
import com.xxl.job.admin.core.model.XxlJobInfo;
import com.xxl.job.admin.core.model.XxlJobLog;
import com.xxl.job.admin.core.scheduler.XxlJobScheduler;
import com.xxl.job.admin.core.util.I18nUtil;
import com.xxl.job.admin.service.IXxlJobGroupService;
import com.xxl.job.admin.service.IXxlJobInfoService;
import com.xxl.job.admin.service.IXxlJobLogService;
import com.xxl.job.core.biz.ExecutorBiz;
import com.xxl.job.core.biz.model.LogResult;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.util.DateUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * job log controller
 *
 * @author xuxueli 2015-12-19 16:13:16
 */
@Controller
@RequestMapping("/joblog")
@Slf4j
public class JobLogController {

    @Autowired
    private IXxlJobGroupService xxlJobGroupService;
    @Autowired
    public IXxlJobInfoService xxlJobInfoService;
    @Autowired
    public IXxlJobLogService xxlJobLogService;

    @RequestMapping
    public String index(HttpServletRequest request, Model model,
                        @RequestParam(required = false, defaultValue = "0") Integer jobId) {

        // executor group list
        List<XxlJobGroup> jobGroupList_all = xxlJobGroupService.list();

        // filter groups by the current user's role
        List<XxlJobGroup> jobGroupList = JobInfoController.filterJobGroupByRole(request, jobGroupList_all);
        if (jobGroupList == null || jobGroupList.size() == 0) {
            throw new XxlJobException(I18nUtil.getString("jobgroup_empty"));
        }
        model.addAttribute("JobGroupList", jobGroupList);

        // job info
        if (jobId > 0) {
            XxlJobInfo jobInfo = xxlJobInfoService.getById(jobId);
            if (jobInfo == null) {
                throw new RuntimeException(I18nUtil.getString("jobinfo_field_id") + I18nUtil.getString("system_unvalid"));
            }
            model.addAttribute("jobInfo", jobInfo);

            // valid permission
            JobInfoController.validPermission(request, jobInfo.getJobGroup());
        }

        return "joblog/joblog.index";
    }

    @RequestMapping("/getJobsByGroup")
    @ResponseBody
    public ReturnT<List<XxlJobInfo>> getJobsByGroup(int jobGroup) {
        List<XxlJobInfo> list = xxlJobInfoService.getJobsByGroup(jobGroup);
        return new ReturnT<List<XxlJobInfo>>(list);
    }

    @RequestMapping("/pageList")
    @ResponseBody
    public Map<String, Object> pageList(HttpServletRequest request,
                                        @RequestParam(required = false, defaultValue = "0") int start,
                                        @RequestParam(required = false, defaultValue = "10") int length,
                                        int jobGroup, int jobId, int logStatus, String filterTime) {

        // valid permission: admins may query all groups; ordinary users may only query jobGroups they have permission for
        JobInfoController.validPermission(request, jobGroup);

        // parse param
        Date triggerTimeStart = null;
        Date triggerTimeEnd = null;
        if (filterTime != null && filterTime.trim().length() > 0) {
            String[] temp = filterTime.split(" - ");
            if (temp.length == 2) {
                triggerTimeStart = DateUtil.parseDateTime(temp[0]);
                triggerTimeEnd = DateUtil.parseDateTime(temp[1]);
            }
        }

        // page query
        List<XxlJobLog> list = xxlJobLogService.pageList(start, length, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);
        int list_count = xxlJobLogService.pageListCount(start, length, jobGroup, jobId, triggerTimeStart, triggerTimeEnd, logStatus);

        // package result
        Map<String, Object> maps = new HashMap<String, Object>();
        maps.put("recordsTotal", list_count);       // total record count
        maps.put("recordsFiltered", list_count);    // record count after filtering
        maps.put("data", list);                     // paged list
        return maps;
    }

    @RequestMapping("/logDetailPage")
    public String logDetailPage(int id, Model model) {

        // base check
        XxlJobLog jobLog = xxlJobLogService.getById(id);
        if (jobLog == null) {
            throw new RuntimeException(I18nUtil.getString("joblog_logid_unvalid"));
        }

        model.addAttribute("triggerCode", jobLog.getTriggerCode());
        model.addAttribute("handleCode", jobLog.getHandleCode());
        model.addAttribute("executorAddress", jobLog.getExecutorAddress());
        model.addAttribute("triggerTime", jobLog.getTriggerTime().getTime());
        model.addAttribute("logId", jobLog.getId());
        return "joblog/joblog.detail";
    }
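    /**
     * Load a chunk of execution log from the remote executor, starting at fromLineNum.
     * If the executor reports no further lines (fromLineNum > toLineNum) and the job has
     * already recorded a handle result, the returned log content is flagged as ended.
     */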
    @RequestMapping("/logDetailCat")
    @ResponseBody
    public ReturnT<LogResult> logDetailCat(String executorAddress, long triggerTime, long logId, int fromLineNum) {
        try {
            ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(executorAddress);
            ReturnT<LogResult> logResult = executorBiz.log(triggerTime, logId, fromLineNum);

            // is end
            if (logResult.getContent() != null && logResult.getContent().getFromLineNum() > logResult.getContent().getToLineNum()) {
                XxlJobLog jobLog = xxlJobLogService.getById(logId);
                if (jobLog.getHandleCode() > 0) {
                    logResult.getContent().setEnd(true);
                }
            }

            return logResult;
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return new ReturnT<LogResult>(ReturnT.FAIL_CODE, e.getMessage());
        }
    }

    @RequestMapping("/logKill")
    @ResponseBody
    public ReturnT<String> logKill(int id) {
        // base check
        XxlJobLog jobLog = xxlJobLogService.getById(id);
        XxlJobInfo jobInfo = xxlJobInfoService.getById(jobLog.getJobId());
        if (jobInfo == null) {
            return new ReturnT<String>(500, I18nUtil.getString("jobinfo_glue_jobid_unvalid"));
        }
        if (ReturnT.SUCCESS_CODE != jobLog.getTriggerCode()) {
            return new ReturnT<String>(500, I18nUtil.getString("joblog_kill_log_limit"));
        }

        // request of kill
        ReturnT<String> runResult = null;
        try {
            ExecutorBiz executorBiz = XxlJobScheduler.getExecutorBiz(jobLog.getExecutorAddress());
            runResult = executorBiz.kill(jobInfo.getId());
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            runResult = new ReturnT<String>(500, e.getMessage());
        }

        if (ReturnT.SUCCESS_CODE == runResult.getCode()) {
            jobLog.setHandleCode(ReturnT.FAIL_CODE);
            jobLog.setHandleMsg(I18nUtil.getString("joblog_kill_log_byman") + ":" + (runResult.getMsg() != null ? runResult.getMsg() : ""));
            jobLog.setHandleTime(new Date());
            xxlJobLogService.updateHandleInfo(jobLog);
            return new ReturnT<String>(runResult.getMsg());
        } else {
            return new ReturnT<String>(500, runResult.getMsg());
        }
    }
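    /**
     * Clear job logs for the given jobGroup/jobId.
     * type 1-4: clear logs older than 1/3/6 months or 1 year;
     * type 5-8: keep only the most recent 1,000/10,000/30,000/100,000 records;
     * type 9: clear all logs. Deletion proceeds in batches of 1000 ids per round.
     */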
    @RequestMapping("/clearLog")
    @ResponseBody
    public ReturnT<String> clearLog(int jobGroup, int jobId, int type) {

        Date clearBeforeTime = null;
        int clearBeforeNum = 0;
        if (type == 1) {
            clearBeforeTime = DateUtil.addMonths(new Date(), -1);   // clear logs older than one month
        } else if (type == 2) {
            clearBeforeTime = DateUtil.addMonths(new Date(), -3);   // clear logs older than three months
        } else if (type == 3) {
            clearBeforeTime = DateUtil.addMonths(new Date(), -6);   // clear logs older than six months
        } else if (type == 4) {
            clearBeforeTime = DateUtil.addYears(new Date(), -1);    // clear logs older than one year
        } else if (type == 5) {
            clearBeforeNum = 1000;      // keep only the most recent 1,000 log records
        } else if (type == 6) {
            clearBeforeNum = 10000;     // keep only the most recent 10,000 log records
        } else if (type == 7) {
            clearBeforeNum = 30000;     // keep only the most recent 30,000 log records
        } else if (type == 8) {
            clearBeforeNum = 100000;    // keep only the most recent 100,000 log records
        } else if (type == 9) {
            clearBeforeNum = 0;         // clear all log records
        } else {
            return new ReturnT<String>(ReturnT.FAIL_CODE, I18nUtil.getString("joblog_clean_type_unvalid"));
        }

        // delete in batches until no matching log ids remain
        List<Long> logIds = null;
        do {
            logIds = xxlJobLogService.findClearLogIds(jobGroup, jobId, clearBeforeTime, clearBeforeNum, 1000);
            if (logIds != null && logIds.size() > 0) {
                xxlJobLogService.clearLog(logIds);
            }
        } while (logIds != null && logIds.size() > 0);

        return ReturnT.SUCCESS;
    }

}