AFL instrumented compilation, plus configuration files
parent 07544335ca
commit 5419ee5dce
@@ -0,0 +1,6 @@
package cd.casic.ci.process.engine.constant;

public class AFLSlotCompileConstant {
    public static final String MANAGER_ID = "managerId";
    public static final String COMMAND_SCRIPT = "buildScript";
}
@@ -4,26 +4,28 @@ public class PipelineGlobalVariableConstant {
    /**
     * Base working directory of the AFL pipeline; the full working directory is base dir + /PIP_${pipelineId}
     */
    public static final String AFL_WORK_DIR_PREFIX = "/home/casic/706/yunqi/PIP_";
    public static final String AFL_DOCKER_WORK_DIR_PREFIX = "PIP_";
    public static final String AFL_DOCKER_BASE_DIR = "/test";
    public static final String AFL_VOLUME_WORK_DIR_PREFIX = "/home/casic/706/yunqi";
    /**
     * Key of the global variable holding the directory actually bound as the AFL docker volume mount; its value is base dir + /PIP_${pipelineId}
     */
    public static final String AFL_WORK_DIR_KEY = "AFL_PIPELINE_WORK_DIR";
    public static final String AFL_DOCKER_WORK_DIR_KEY = "AFL_DOCKER_WORK_DIR_KEY";
    /**
     * Prefix of the folder under the working directory that the target is uploaded to (TASK_${taskId})
     */
    public static final String AFL_SLOT_COMPILE_PATH_PREFIX = "TASK_";
    public static final String AFL_DOCKER_SLOT_COMPILE_PATH_PREFIX = "TASK_";
    /**
     * Output directory produced by the instrumented-compilation node; stored in a global variable for downstream nodes
     */
    public static final String AFL_SLOT_COMPILE_PATH_KEY = "AFL_SLOT_COMPILE_PATH";
    public static final String AFL_DOCKER_SLOT_COMPILE_PATH_KEY = "AFL_DOCKER_SLOT_COMPILE_PATH_KEY";
    /**
     * Path for generated test cases (AI-generated or uploaded directly): the SEED folder under the working directory
     */
    public static final String AFL_SEED_PATH = "SEED";
    public static final String AFL_DOCKER_SEED_PATH = "SEED";
    /**
     * AFL output directory
     */
    public static final String AFL_OUTPUT = "ai_afl";
    public static final String AFL_DOCKER_OUTPUT = "ai_afl";
}
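Taken together, these constants describe one directory tree that lives on the host under /home/casic/706/yunqi and is bind-mounted into the AFL container at /test. A small sketch of how the pieces compose, using made-up pipelineId and taskId values purely for illustration:

    // Hypothetical ids, for illustration only.
    String pipelineId = "42";
    String taskId = "7";
    // Host-side seed directory: /home/casic/706/yunqi/PIP_42/TASK_7/SEED
    String hostSeedDir = PipelineGlobalVariableConstant.AFL_VOLUME_WORK_DIR_PREFIX
            + "/" + PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_PREFIX + pipelineId
            + "/" + PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_PREFIX + taskId
            + "/" + PipelineGlobalVariableConstant.AFL_DOCKER_SEED_PATH;
    // The same directory seen from inside the container (through the -v mount): /test/PIP_42/TASK_7/SEED
    String containerSeedDir = PipelineGlobalVariableConstant.AFL_DOCKER_BASE_DIR
            + "/" + PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_PREFIX + pipelineId
            + "/" + PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_PREFIX + taskId
            + "/" + PipelineGlobalVariableConstant.AFL_DOCKER_SEED_PATH;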
@@ -4,6 +4,7 @@ import cd.casic.ci.process.common.WorkAtom;
import cd.casic.ci.process.dto.req.resource.ResourceQueryReq;
import cd.casic.ci.process.dto.resp.resource.ResourceFindResp;
import cd.casic.ci.process.dto.resp.taskResource.TaskResourceFindResp;
import cd.casic.ci.process.engine.constant.AFLSlotCompileConstant;
import cd.casic.ci.process.engine.constant.DIYImageExecuteCommandConstant;
import cd.casic.ci.process.engine.constant.PipelineGlobalVariableConstant;
import cd.casic.ci.process.engine.runContext.TaskRunContext;
@@ -15,6 +16,7 @@ import cd.casic.ci.process.process.dataObject.task.PipTask;
import cd.casic.ci.process.process.service.resource.ResourceManagerService;
import cd.casic.ci.process.process.service.target.TargetVersionService;
import cd.casic.ci.process.util.SftpUploadUtil;
import cd.casic.framework.commons.exception.enums.GlobalErrorCodeConstants;
import cd.casic.module.execute.docker.dataobject.dto.DockerEndpointDo;
import com.alibaba.fastjson.JSON;
import jakarta.annotation.Resource;
@@ -32,13 +34,14 @@ public class AFLSlotCompileWorker extends DockerWorker {
    @Override
    public void execute(TaskRunContext context) {
        PipTask task = context.getContextDef() instanceof PipTask ? ((PipTask) context.getContextDef()) : null;
        String workDir = PipelineGlobalVariableConstant.AFL_WORK_DIR_PREFIX + task.getPipelineId();
        // each pipeline gets its own directory so pipelines do not interfere with one another
        String workDir = PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_PREFIX + task.getPipelineId();
        Map<String, Object> globalVariables = context.getGlobalVariables();
        if (!globalVariables.containsKey(workDir)) {
            globalVariables.put(PipelineGlobalVariableConstant.AFL_WORK_DIR_KEY, workDir);
            globalVariables.put(PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_KEY, workDir);
        }
        Map<String, Object> taskProperties = task.getTaskProperties();
        String managerId = taskProperties.get("managerId") instanceof String ? ((String) taskProperties.get("resourceId")) : null;
        String managerId = taskProperties.get(AFLSlotCompileConstant.MANAGER_ID) instanceof String ? ((String) taskProperties.get("managerId")) : null;
        // upload the target file via ssh
        ResourceFindResp resourceById = resourceManagerService.findResourceById(managerId);
        String machineId = resourceById.getMachineId();
@@ -65,28 +68,35 @@
            append(context, "目标文件不存在或不可读");
            toBadEnding();
        }
        // relative path under the working directory
        String compilePath = PipelineGlobalVariableConstant.AFL_SLOT_COMPILE_PATH_PREFIX + task.getId();
        String realWorkDir = workDir + compilePath;
        // relative path under the working directory, with /test as the root
        String compilePath = PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_PREFIX
                + task.getPipelineId()
                + File.separator
                + PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_PREFIX + task.getId();
        // volume mount directory + pipeline directory + task compile directory
        String realPath = PipelineGlobalVariableConstant.AFL_VOLUME_WORK_DIR_PREFIX + File.separator + compilePath;
        // upload the target file to the designated resource server
        try {
            SftpUploadUtil.uploadFileViaSftp(
                    machineInfo.getMachineHost()
                    , Integer.valueOf(machineInfo.getSshPort()), machineInfo.getUsername(), machineInfo.getPassword(), "", compilePath, filePath, file.getName());
                    , Integer.valueOf(machineInfo.getSshPort()), machineInfo.getUsername(), machineInfo.getPassword(), "", realPath, filePath, file.getName());
        } catch (SftpUploadUtil.SftpUploadException e) {
            append(context, "上传文件失败,请确认资源信息是否有误:" + JSON.toJSONString(machineInfo));
            toBadEnding();
        }
        // run the preset commands and change into the directory
        String allCommand = "docker run -v " + workDir + ":/test -it aflplusplus/aflplusplus bash\n" +
                "cd /test\n" +
        String allCommand = "docker run -v " + PipelineGlobalVariableConstant.AFL_VOLUME_WORK_DIR_PREFIX + ":" + PipelineGlobalVariableConstant.AFL_DOCKER_BASE_DIR + " -it aflplusplus bash\n" +
                "cd " + PipelineGlobalVariableConstant.AFL_DOCKER_BASE_DIR + "\n" + // change into the volume-mounted directory inside the container
                "mkdir -p " + compilePath + "\n" +
                "cd " + compilePath + "\n";
        Object commandScriptObj = taskProperties.get(DIYImageExecuteCommandConstant.COMMAND_SCRIPT);
        Object commandScriptObj = taskProperties.get(AFLSlotCompileConstant.COMMAND_SCRIPT);
        String commandScript = commandScriptObj instanceof String ? ((String) commandScriptObj) : null;
        allCommand += commandScript;
        allCommand += commandScript + "\nll -s";
        dockerRun(allCommand, dockerInfo, context);
        // update the global variables
        globalVariables.put(PipelineGlobalVariableConstant.AFL_SLOT_COMPILE_PATH_KEY, compilePath);
        globalVariables.put(PipelineGlobalVariableConstant.AFL_WORK_DIR_KEY, workDir);
        // directory under /test holding the compiled files and the target, a pipelineId + taskId combination
        globalVariables.put(PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_KEY, compilePath);
        // working directory of the current pipeline under /test (without the task part)
        globalVariables.put(PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_KEY, workDir);
    }
}
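For reference, with a hypothetical pipelineId of 42, a taskId of 7, and a user-supplied build script of make, the allCommand assembled above would come out roughly as follows (illustration only; the ids and script are assumptions):

    // Approximate value of allCommand handed to dockerRun, assuming the values above.
    String exampleCommand =
            "docker run -v /home/casic/706/yunqi:/test -it aflplusplus bash\n" +
            "cd /test\n" +
            "mkdir -p PIP_42/TASK_7\n" +
            "cd PIP_42/TASK_7\n" +
            "make\n" +
            "ll -s";

The mkdir and cd use the path relative to /test, while the SFTP upload earlier in the method writes to the matching absolute host path, so inside the container the build script runs next to the uploaded target.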
@@ -3,16 +3,20 @@ package cd.casic.ci.process.engine.worker.afl;

import cd.casic.ci.process.common.WorkAtom;
import cd.casic.ci.process.dto.req.resource.ResourceQueryReq;
import cd.casic.ci.process.dto.resp.resource.ResourceFindResp;
import cd.casic.ci.process.dto.resp.taskResource.TaskResourceFindResp;
import cd.casic.ci.process.engine.constant.DIYImageExecuteCommandConstant;
import cd.casic.ci.process.engine.constant.PipelineGlobalVariableConstant;
import cd.casic.ci.process.engine.constant.TestCaseGenerationConstant;
import cd.casic.ci.process.engine.runContext.TaskRunContext;
import cd.casic.ci.process.engine.worker.base.DockerWorker;
import cd.casic.ci.process.engine.worker.base.SshWorker;
import cd.casic.ci.process.process.dataObject.pipeline.PipPipeline;
import cd.casic.ci.process.process.dataObject.resource.PipResourceMachine;
import cd.casic.ci.process.process.dataObject.task.PipTask;
import cd.casic.ci.process.util.CryptogramUtil;
import cd.casic.ci.process.util.SftpUploadUtil;
import cd.casic.module.execute.docker.dataobject.dto.DockerEndpointDo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
@@ -22,65 +26,46 @@ import java.util.Map;

@WorkAtom(taskType = "TEST_CASE_GENERATION")
@Slf4j
public class TestCaseGenerationWorker extends SshWorker {
public class TestCaseGenerationWorker extends DockerWorker {
    @Override
    public void execute(TaskRunContext context) {
        int statusCode = -1;
        Map<String, Object> localVariables = context.getLocalVariables();
        Map<String, Object> globalVariables = context.getGlobalVariables();
        String workDir = globalVariables.get(PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_KEY) instanceof String ? ((String) globalVariables.get(PipelineGlobalVariableConstant.AFL_DOCKER_WORK_DIR_KEY)) : null;
        if (context.getContextDef() instanceof PipTask taskDef) {
            String seedPath = workDir + File.separator + PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_PREFIX + taskDef.getId() + File.separator + PipelineGlobalVariableConstant.AFL_DOCKER_SEED_PATH;
            log.info(taskDef.getTaskName());
            Map<String, Object> taskProperties = taskDef.getTaskProperties();
            Object caseType = taskProperties.get(TestCaseGenerationConstant.CASE_TYPE_KEY);
            String resourceType = taskProperties.get("resourceType").toString();
            String resourceId = taskProperties.get("resourceId").toString();
            if (StringUtils.isEmpty(resourceId) || StringUtils.isEmpty(resourceType)) {
                // missing parameters
                toBadEnding();
            String binaryName = taskProperties.get("binaryName").toString();
            String managerId = taskProperties.get("managerId") instanceof String ? ((String) taskProperties.get("managerId")) : null;
            ResourceFindResp resourceById = resourceManagerService.findResourceById(managerId);
            String machineId = resourceById.getMachineId();
            String dockerId = resourceById.getDockerId();
            if (StringUtils.isEmpty(machineId) || StringUtils.isEmpty(dockerId)) {
                append(context, "该资源不支持docker或者ssh");
            }
            ResourceQueryReq req = new ResourceQueryReq();
            req.setId(resourceId);
            req.setType(resourceType);
            TaskResourceFindResp resourceListByType = getResourceManagerService().findResourceListByType(req);
            if (CollectionUtils.isEmpty(resourceListByType.getResourceMachineList())) {
                append(context, "当前机器不支持machine");
                return;
            }
            req.setId(machineId);
            req.setType("machine");
            TaskResourceFindResp machineQuery = resourceManagerService.findResourceListByType(req);
            req.setId(dockerId);
            req.setType("docker");
            TaskResourceFindResp dockerQuery = resourceManagerService.findResourceListByType(req);
            PipResourceMachine machineInfo = machineQuery.getResourceMachineList().get(0);
            DockerEndpointDo dockerInfo = dockerQuery.getDockerEndpointList().get(0);
            // if machineId is 0, the node has no machine configured, so use the start node's machine
            PipResourceMachine resourceMachine = resourceListByType.getResourceMachineList().get(0);
            if (TestCaseGenerationConstant.CASE_TYPE_AI.equals(caseType)) {
                Object commandScriptObj = taskProperties.get(TestCaseGenerationConstant.COMMAND_SCRIPT);
                String commandScript = commandScriptObj instanceof String ? ((String) commandScriptObj) : null;
                String seedTarget = taskProperties.get(TestCaseGenerationConstant.SEED_TARGET) instanceof String ? ((String) taskProperties.get(TestCaseGenerationConstant.SEED_TARGET)) : null;
                if (StringUtils.isEmpty(commandScript)) {
                    // missing parameter
                    toBadEnding();
                }
                if (StringUtils.isNotEmpty(seedTarget)) {
                    commandScript += " --output-dir " + seedTarget + " --count 100";
                } else {
                    commandScript += " --output-dir case --count 100";
                }
                try {
                // /test directory inside the container; pick up the compiled files
                String compileDir = globalVariables.get(PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_KEY) instanceof String ? ((String) globalVariables.get(PipelineGlobalVariableConstant.AFL_DOCKER_SLOT_COMPILE_PATH_KEY)) : null;

                String commandScript = "cd /test\n" +
                        "mkdir -p " + seedPath + "\n" +
                        "PYTHONPATH=/test/CaseGenerator/src python3 /test/CaseGenerator/src/CaseGenerator/main.py --work-dir " + compileDir + " --binary " + compileDir + File.separator + binaryName + " --output-dir " + seedPath + " --count 100";
                // deserialize the node's configuration into an object
                log.info("构建脚本" + commandScript);
                // get the machine
                    statusCode = shell(resourceMachine, null, context,
                            "echo \"测试用例生成\"",
                            "cd /home/casic/706/yunqi", // base directory
                            commandScript
                    );
                } catch (Exception e) {
                    String errorMessage = "该节点配置信息为空,请先配置该节点信息" + "\r\n";
                    log.error("执行ssh失败:", e);
                    append(context, errorMessage);
                    toBadEnding();
                }
                if (statusCode == 0) {
                    log.info("节点执行完成");
                } else {
                    log.error("节点执行失败");
                }
                localVariables.put(DIYImageExecuteCommandConstant.STATUS_CODE, statusCode);
                dockerRun(commandScript, dockerInfo, context);
            } else {
                // file upload
                String filePath = taskProperties.get(TestCaseGenerationConstant.SEED_SOURCE) instanceof String ? ((String) taskProperties.get(TestCaseGenerationConstant.SEED_SOURCE)) : null;
@@ -89,17 +74,12 @@ public class TestCaseGenerationWorker extends SshWorker {
                toBadEnding();
            }
            File file = new File(filePath);
            String seedTarget = taskProperties.get(TestCaseGenerationConstant.SEED_TARGET) instanceof String ? ((String) taskProperties.get(TestCaseGenerationConstant.SEED_TARGET)) : null;
            String basePath = "/home/casic/706/yunqi/";
            if (seedTarget.startsWith("/")) {
                seedTarget = seedTarget.substring(1);
            }
            seedTarget = basePath + seedTarget;
            String seedTarget = PipelineGlobalVariableConstant.AFL_VOLUME_WORK_DIR_PREFIX + File.separator + seedPath;
            // upload the file into the directory on the server
            try {
                SftpUploadUtil.uploadFileViaSftp(
                        resourceMachine.getMachineHost()
                        , Integer.valueOf(resourceMachine.getSshPort()), resourceMachine.getUsername(), resourceMachine.getPassword(), "", filePath, seedTarget, file.getName());
                        machineInfo.getMachineHost()
                        , Integer.valueOf(machineInfo.getSshPort()), machineInfo.getUsername(), machineInfo.getPassword(), "", filePath, seedTarget, file.getName());
            } catch (SftpUploadUtil.SftpUploadException e) {
                append(context, "seed文件上传失败");
                log.error("seed文件上传失败", e);
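For the AI branch above, with a hypothetical pipeline 42 whose compile task was 7, a generation task with id 9, and a binaryName of target_afl, the commandScript handed to dockerRun would look roughly like this (illustration only; the ids and binary name are assumptions):

    // Approximate value of commandScript in the AI-generation branch.
    String exampleScript =
            "cd /test\n" +
            "mkdir -p PIP_42/TASK_9/SEED\n" +
            "PYTHONPATH=/test/CaseGenerator/src python3 /test/CaseGenerator/src/CaseGenerator/main.py" +
            " --work-dir PIP_42/TASK_7" +
            " --binary PIP_42/TASK_7/target_afl" +
            " --output-dir PIP_42/TASK_9/SEED" +
            " --count 100";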
@@ -71,9 +71,9 @@ spring:
  data:
    redis:
      host: 127.0.0.1 # address
#      port: 16379 # port
      port: 16379 # port
#      host: 192.168.1.120 # address
      port: 6379 # port
#      port: 6379 # port
      database: 0 # database index
#      password: dev # password; recommended to enable in production
@@ -136,6 +136,9 @@ spring:
      service-host-type: IP # prefer the IP when registering an instance [IP, HOST_NAME, CANONICAL_HOST_NAME]
    # settings for the Spring Boot Admin Server side
    context-path: /admin # configure Spring
  mvc:
    async:
      request-timeout: 30000

# Log file configuration
logging:
@@ -163,3 +166,12 @@ sast:
  password: Aa123456
  id: clouditera
  captcha: clouditera
  base-url: "http://39.155.212.109:22880" # remote
#  base-url: "http://192.168.31.93" # local
tartet:
  file-upload:
    remoteHost: 175.6.27.252
    remotePort: 22
    username: roots
    password: hnidc0327cn!@#xhh
    remoteDir: /home/ops/ops-pro/file/
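The class that consumes the tartet.file-upload block is not part of this commit; if it is bound with Spring's @ConfigurationProperties, a minimal holder could look like the sketch below. The class name and package placement are assumptions; only the property names come from the YAML above:

    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.stereotype.Component;

    // Hypothetical binding class, not part of the commit.
    @Component
    @ConfigurationProperties(prefix = "tartet.file-upload")
    public class FileUploadProperties {
        private String remoteHost; // e.g. 175.6.27.252
        private int remotePort;    // e.g. 22
        private String username;
        private String password;
        private String remoteDir;  // e.g. /home/ops/ops-pro/file/
        // getters and setters required for binding are omitted for brevity
    }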
@@ -82,6 +82,9 @@ spring:
    redis:
      repositories:
        enabled: false # the project does not use Spring Data Redis repositories, so disable them to keep startup fast
  mvc:
    async:
      request-timeout: 1000000

# VO conversion (data translation) settings
easy-trans: