Оптимизация получения данных о проектах для обработки новых и старых MR

This commit is contained in:
Struchkov Mark 2022-12-16 17:57:44 +03:00
parent e12517c35a
commit 20b520eda1
14 changed files with 80 additions and 59 deletions

View File

@ -50,9 +50,8 @@ public class Pipeline {
@Column(name = "web_url") @Column(name = "web_url")
private String webUrl; private String webUrl;
@ManyToOne @Column(name = "project_id")
@JoinColumn(name = "project_id") private Long projectId;
private Project project;
@ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE, CascadeType.REFRESH}) @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE, CascadeType.REFRESH})
@JoinColumn(name = "person_id") @JoinColumn(name = "person_id")

View File

@ -11,6 +11,7 @@ import static dev.struchkov.haiti.utils.Strings.escapeMarkdown;
/** /**
* @author upagge 17.01.2021 * @author upagge 17.01.2021
*/ */
//TODO [16.12.2022|uPagge]: Нужно реализовать заполнение projectName
public record PipelineNotify( public record PipelineNotify(
Long pipelineId, Long pipelineId,
String projectName, String projectName,

View File

@ -23,4 +23,6 @@ public interface ProjectRepository {
Page<Project> findAllById(Pageable pagination); Page<Project> findAllById(Pageable pagination);
Set<Long> findAllIds();
} }

View File

@ -3,8 +3,6 @@ package dev.struchkov.bot.gitlab.context.service;
import dev.struchkov.bot.gitlab.context.domain.ExistContainer; import dev.struchkov.bot.gitlab.context.domain.ExistContainer;
import dev.struchkov.bot.gitlab.context.domain.entity.Project; import dev.struchkov.bot.gitlab.context.domain.entity.Project;
import lombok.NonNull; import lombok.NonNull;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@ -20,12 +18,11 @@ public interface ProjectService {
Project getByIdOrThrow(@NonNull Long projectId); Project getByIdOrThrow(@NonNull Long projectId);
Page<Project> getAll(@NonNull Pageable pagination);
List<Project> createAll(List<Project> newProjects); List<Project> createAll(List<Project> newProjects);
boolean existsById(Long projectId); boolean existsById(Long projectId);
ExistContainer<Project, Long> existsById(Set<Long> projectIds); ExistContainer<Project, Long> existsById(Set<Long> projectIds);
Set<Long> getAllIds();
} }

View File

@ -8,6 +8,7 @@ import dev.struchkov.bot.gitlab.context.service.CleanService;
import dev.struchkov.bot.gitlab.context.service.MergeRequestsService; import dev.struchkov.bot.gitlab.context.service.MergeRequestsService;
import dev.struchkov.bot.gitlab.context.service.PipelineService; import dev.struchkov.bot.gitlab.context.service.PipelineService;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@ -24,6 +25,7 @@ import static dev.struchkov.bot.gitlab.context.domain.MergeRequestState.MERGED;
* *
* @author upagge 08.02.2021 * @author upagge 08.02.2021
*/ */
@Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class CleanServiceImpl implements CleanService { public class CleanServiceImpl implements CleanService {
@ -38,6 +40,7 @@ public class CleanServiceImpl implements CleanService {
@Override @Override
public void cleanOldMergedRequests() { public void cleanOldMergedRequests() {
log.debug("Старт очистки старых MR");
int page = 0; int page = 0;
Page<MergeRequest> mergeRequestSheet = mergeRequestsService.getAll(MR_CLEAN_FILTER, PageRequest.of(page, COUNT)); Page<MergeRequest> mergeRequestSheet = mergeRequestsService.getAll(MR_CLEAN_FILTER, PageRequest.of(page, COUNT));
@ -50,10 +53,12 @@ public class CleanServiceImpl implements CleanService {
mergeRequestSheet = mergeRequestsService.getAll(MR_CLEAN_FILTER, PageRequest.of(++page, COUNT)); mergeRequestSheet = mergeRequestsService.getAll(MR_CLEAN_FILTER, PageRequest.of(++page, COUNT));
} }
log.debug("Конец очистки старых MR");
} }
@Override @Override
public void cleanOldPipelines() { public void cleanOldPipelines() {
log.debug("Старт очистки старых пайплайнов");
int page = 0; int page = 0;
final PipelineFilter filter = cleanPipelineFilter(); final PipelineFilter filter = cleanPipelineFilter();
Page<Pipeline> sheet = pipelineService.getAll(filter, PageRequest.of(page, COUNT)); Page<Pipeline> sheet = pipelineService.getAll(filter, PageRequest.of(page, COUNT));
@ -67,6 +72,7 @@ public class CleanServiceImpl implements CleanService {
sheet = pipelineService.getAll(filter, PageRequest.of(page, COUNT)); sheet = pipelineService.getAll(filter, PageRequest.of(page, COUNT));
} }
log.debug("Конец очистки старых пайплайнов");
} }
private PipelineFilter cleanPipelineFilter() { private PipelineFilter cleanPipelineFilter() {

View File

@ -61,7 +61,6 @@ public class PipelineServiceImpl implements PipelineService {
PipelineNotify.builder() PipelineNotify.builder()
.newStatus(pipeline.getStatus().name()) .newStatus(pipeline.getStatus().name())
.pipelineId(pipeline.getId()) .pipelineId(pipeline.getId())
.projectName(pipeline.getProject().getName())
.refName(pipeline.getRef()) .refName(pipeline.getRef())
.webUrl(pipeline.getWebUrl()) .webUrl(pipeline.getWebUrl())
.oldStatus(oldStatus) .oldStatus(oldStatus)
@ -76,7 +75,6 @@ public class PipelineServiceImpl implements PipelineService {
.orElseThrow(notFoundException("Pipeline не найден")); .orElseThrow(notFoundException("Pipeline не найден"));
if (!oldPipeline.getUpdated().equals(pipeline.getUpdated())) { if (!oldPipeline.getUpdated().equals(pipeline.getUpdated())) {
pipeline.setProject(oldPipeline.getProject());
notifyNewPipeline(pipeline, oldPipeline.getStatus().name()); notifyNewPipeline(pipeline, oldPipeline.getStatus().name());
return repository.save(pipeline); return repository.save(pipeline);
} }

View File

@ -10,8 +10,6 @@ import dev.struchkov.bot.gitlab.context.service.PersonService;
import dev.struchkov.bot.gitlab.context.service.ProjectService; import dev.struchkov.bot.gitlab.context.service.ProjectService;
import lombok.NonNull; import lombok.NonNull;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@ -60,11 +58,6 @@ public class ProjectServiceImpl implements ProjectService {
.orElseThrow(notFoundException("Проект не найден")); .orElseThrow(notFoundException("Проект не найден"));
} }
@Override
public Page<Project> getAll(@NonNull Pageable pagination) {
return repository.findAllById(pagination);
}
@Override @Override
@Transactional @Transactional
public List<Project> createAll(List<Project> newProjects) { public List<Project> createAll(List<Project> newProjects) {
@ -93,6 +86,12 @@ public class ProjectServiceImpl implements ProjectService {
} }
} }
@Override
@Transactional(readOnly = true)
public Set<Long> getAllIds() {
return repository.findAllIds();
}
private void notifyAboutNewProject(Project newProject, String authorName) { private void notifyAboutNewProject(Project newProject, String authorName) {
notifyService.send( notifyService.send(
NewProjectNotify.builder() NewProjectNotify.builder()

View File

@ -12,6 +12,7 @@ import dev.struchkov.bot.gitlab.core.config.properties.PersonProperty;
import dev.struchkov.bot.gitlab.sdk.domain.DiscussionJson; import dev.struchkov.bot.gitlab.sdk.domain.DiscussionJson;
import dev.struchkov.haiti.utils.network.HttpParse; import dev.struchkov.haiti.utils.network.HttpParse;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.ConversionService;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
@ -39,6 +40,7 @@ import static dev.struchkov.haiti.utils.network.HttpParse.ACCEPT;
* *
* @author upagge 11.02.2021 * @author upagge 11.02.2021
*/ */
@Slf4j
@Component @Component
@RequiredArgsConstructor @RequiredArgsConstructor
public class DiscussionParser { public class DiscussionParser {
@ -56,6 +58,7 @@ public class DiscussionParser {
* Поиск новых обсуждений * Поиск новых обсуждений
*/ */
public void scanNewDiscussion() { public void scanNewDiscussion() {
log.debug("Старт обработки новых дискуссий");
int page = 0; int page = 0;
Page<MergeRequest> mergeRequestSheet = mergeRequestsService.getAll(PageRequest.of(page, COUNT)); Page<MergeRequest> mergeRequestSheet = mergeRequestsService.getAll(PageRequest.of(page, COUNT));
@ -68,6 +71,7 @@ public class DiscussionParser {
mergeRequestSheet = mergeRequestsService.getAll(PageRequest.of(++page, COUNT)); mergeRequestSheet = mergeRequestsService.getAll(PageRequest.of(++page, COUNT));
} }
log.debug("Конец обработки новых дискуссий");
} }
private void processingMergeRequest(MergeRequest mergeRequest) { private void processingMergeRequest(MergeRequest mergeRequest) {
@ -151,6 +155,7 @@ public class DiscussionParser {
* Сканирование старых обсуждений на предмет новых комментариев * Сканирование старых обсуждений на предмет новых комментариев
*/ */
public void scanOldDiscussions() { public void scanOldDiscussions() {
log.debug("Старт обработки старых дискуссий");
int page = 0; int page = 0;
Page<Discussion> discussionPage = discussionService.getAll(PageRequest.of(page, COUNT)); Page<Discussion> discussionPage = discussionService.getAll(PageRequest.of(page, COUNT));
@ -183,7 +188,7 @@ public class DiscussionParser {
discussionPage = discussionService.getAll(PageRequest.of(++page, COUNT)); discussionPage = discussionService.getAll(PageRequest.of(++page, COUNT));
} }
log.debug("Конец обработки старых дискуссий");
} }
private Optional<DiscussionJson> getOldDiscussionJson(Discussion discussion) { private Optional<DiscussionJson> getOldDiscussionJson(Discussion discussion) {

View File

@ -5,7 +5,6 @@ import dev.struchkov.bot.gitlab.context.domain.IdAndStatusPr;
import dev.struchkov.bot.gitlab.context.domain.MergeRequestState; import dev.struchkov.bot.gitlab.context.domain.MergeRequestState;
import dev.struchkov.bot.gitlab.context.domain.entity.MergeRequest; import dev.struchkov.bot.gitlab.context.domain.entity.MergeRequest;
import dev.struchkov.bot.gitlab.context.domain.entity.Person; import dev.struchkov.bot.gitlab.context.domain.entity.Person;
import dev.struchkov.bot.gitlab.context.domain.entity.Project;
import dev.struchkov.bot.gitlab.context.service.MergeRequestsService; import dev.struchkov.bot.gitlab.context.service.MergeRequestsService;
import dev.struchkov.bot.gitlab.context.service.ProjectService; import dev.struchkov.bot.gitlab.context.service.ProjectService;
import dev.struchkov.bot.gitlab.core.config.properties.GitlabProperty; import dev.struchkov.bot.gitlab.core.config.properties.GitlabProperty;
@ -17,8 +16,6 @@ import dev.struchkov.haiti.utils.network.HttpParse;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.ConversionService;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.text.MessageFormat; import java.text.MessageFormat;
@ -51,6 +48,7 @@ public class MergeRequestParser {
private final PersonProperty personProperty; private final PersonProperty personProperty;
public void parsingOldMergeRequest() { public void parsingOldMergeRequest() {
log.debug("Старт обработки старых MR");
final Set<IdAndStatusPr> existIds = mergeRequestsService.getAllId(OLD_STATUSES); final Set<IdAndStatusPr> existIds = mergeRequestsService.getAllId(OLD_STATUSES);
final List<MergeRequest> mergeRequests = existIds.stream() final List<MergeRequest> mergeRequests = existIds.stream()
@ -68,27 +66,23 @@ public class MergeRequestParser {
personMapping(mergeRequests); personMapping(mergeRequests);
mergeRequestsService.updateAll(mergeRequests); mergeRequestsService.updateAll(mergeRequests);
} }
log.debug("Конец обработки старых MR");
} }
public void parsingNewMergeRequest() { public void parsingNewMergeRequest() {
int page = 0; log.debug("Старт обработки новых MR");
Page<Project> projectSheet = projectService.getAll(PageRequest.of(page, COUNT)); final Set<Long> projectIds = projectService.getAllIds();
while (projectSheet.hasContent()) { for (Long projectId : projectIds) {
final List<Project> projects = projectSheet.getContent(); projectProcessing(projectId);
for (Project project : projects) {
projectProcessing(project);
} }
projectSheet = projectService.getAll(PageRequest.of(++page, COUNT)); log.debug("Конец обработки новых MR");
}
} }
private void projectProcessing(Project project) { private void projectProcessing(Long projectId) {
int page = 1; int page = 1;
List<MergeRequestJson> mergeRequestJsons = getMergeRequestJsons(project, page); List<MergeRequestJson> mergeRequestJsons = getMergeRequestJsons(projectId, page);
while (checkNotEmpty(mergeRequestJsons)) { while (checkNotEmpty(mergeRequestJsons)) {
@ -97,6 +91,7 @@ public class MergeRequestParser {
.collect(Collectors.toSet()); .collect(Collectors.toSet());
final ExistContainer<MergeRequest, Long> existContainer = mergeRequestsService.existsById(jsonIds); final ExistContainer<MergeRequest, Long> existContainer = mergeRequestsService.existsById(jsonIds);
log.trace("Из {} полученных MR не найдены в хранилище {}", jsonIds.size(), existContainer.getIdNoFound().size());
if (!existContainer.isAllFound()) { if (!existContainer.isAllFound()) {
final List<MergeRequest> newMergeRequests = mergeRequestJsons.stream() final List<MergeRequest> newMergeRequests = mergeRequestJsons.stream()
.filter(json -> existContainer.getIdNoFound().contains(json.getId())) .filter(json -> existContainer.getIdNoFound().contains(json.getId()))
@ -109,10 +104,11 @@ public class MergeRequestParser {
personMapping(newMergeRequests); personMapping(newMergeRequests);
log.trace("Пачка новых MR обработана и отправлена на сохранение. Количество: {} шт.", newMergeRequests.size());
mergeRequestsService.createAll(newMergeRequests); mergeRequestsService.createAll(newMergeRequests);
} }
mergeRequestJsons = getMergeRequestJsons(project, page++); mergeRequestJsons = getMergeRequestJsons(projectId, page++);
} }
} }
@ -154,11 +150,13 @@ public class MergeRequestParser {
} }
} }
private List<MergeRequestJson> getMergeRequestJsons(Project project, int page) { private List<MergeRequestJson> getMergeRequestJsons(Long projectId, int page) {
return HttpParse.request(MessageFormat.format(gitlabProperty.getUrlPullRequestOpen(), project.getId(), page)) final List<MergeRequestJson> jsons = HttpParse.request(MessageFormat.format(gitlabProperty.getUrlPullRequestOpen(), projectId, page))
.header(StringUtils.H_PRIVATE_TOKEN, personProperty.getToken()) .header(StringUtils.H_PRIVATE_TOKEN, personProperty.getToken())
.header(ACCEPT) .header(ACCEPT)
.executeList(MergeRequestJson.class); .executeList(MergeRequestJson.class);
log.trace("Получено {} шт потенциально новых MR для проекта id:'{}' ", jsons.size(), projectId);
return jsons;
} }
private Optional<MergeRequestJson> getMergeRequest(IdAndStatusPr existId) { private Optional<MergeRequestJson> getMergeRequest(IdAndStatusPr existId) {

View File

@ -3,7 +3,6 @@ package dev.struchkov.bot.gitlab.core.service.parser;
import dev.struchkov.bot.gitlab.context.domain.ExistContainer; import dev.struchkov.bot.gitlab.context.domain.ExistContainer;
import dev.struchkov.bot.gitlab.context.domain.PipelineStatus; import dev.struchkov.bot.gitlab.context.domain.PipelineStatus;
import dev.struchkov.bot.gitlab.context.domain.entity.Pipeline; import dev.struchkov.bot.gitlab.context.domain.entity.Pipeline;
import dev.struchkov.bot.gitlab.context.domain.entity.Project;
import dev.struchkov.bot.gitlab.context.service.PipelineService; import dev.struchkov.bot.gitlab.context.service.PipelineService;
import dev.struchkov.bot.gitlab.context.service.ProjectService; import dev.struchkov.bot.gitlab.context.service.ProjectService;
import dev.struchkov.bot.gitlab.core.config.properties.GitlabProperty; import dev.struchkov.bot.gitlab.core.config.properties.GitlabProperty;
@ -12,6 +11,7 @@ import dev.struchkov.bot.gitlab.core.utils.StringUtils;
import dev.struchkov.bot.gitlab.sdk.domain.PipelineJson; import dev.struchkov.bot.gitlab.sdk.domain.PipelineJson;
import dev.struchkov.haiti.utils.network.HttpParse; import dev.struchkov.haiti.utils.network.HttpParse;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.ConversionService;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
@ -38,6 +38,7 @@ import static dev.struchkov.haiti.utils.network.HttpParse.ACCEPT;
* *
* @author upagge 17.01.2021 * @author upagge 17.01.2021
*/ */
@Slf4j
@Service @Service
@RequiredArgsConstructor @RequiredArgsConstructor
public class PipelineParser { public class PipelineParser {
@ -55,25 +56,21 @@ public class PipelineParser {
private LocalDateTime lastUpdate = LocalDateTime.now(); private LocalDateTime lastUpdate = LocalDateTime.now();
public void scanNewPipeline() { public void scanNewPipeline() {
log.debug("Старт обработки новых пайплайнов");
int page = 0; int page = 0;
Page<Project> projectSheet = projectService.getAll(PageRequest.of(page, COUNT)); final Set<Long> projectIds = projectService.getAllIds();
while (projectSheet.hasContent()) { for (Long projectId : projectIds) {
final List<Project> projects = projectSheet.getContent(); processingProject(projectId);
for (Project project : projects) {
processingProject(project);
} }
projectSheet = projectService.getAll(PageRequest.of(++page, COUNT)); log.debug("Конец обработки новых пайплайнов");
} }
} private void processingProject(Long projectId) {
private void processingProject(Project project) {
int page = 1; int page = 1;
LocalDateTime newLastUpdate = LocalDateTime.now(); LocalDateTime newLastUpdate = LocalDateTime.now();
List<PipelineJson> pipelineJsons = getPipelineJsons(project.getId(), page, lastUpdate); List<PipelineJson> pipelineJsons = getPipelineJsons(projectId, page, lastUpdate);
while (checkNotEmpty(pipelineJsons)) { while (checkNotEmpty(pipelineJsons)) {
@ -89,14 +86,14 @@ public class PipelineParser {
for (Long newId : idsNotFound) { for (Long newId : idsNotFound) {
final Pipeline newPipeline = HttpParse.request( final Pipeline newPipeline = HttpParse.request(
MessageFormat.format(gitlabProperty.getUrlPipeline(), project.getId(), newId) MessageFormat.format(gitlabProperty.getUrlPipeline(), projectId, newId)
) )
.header(ACCEPT) .header(ACCEPT)
.header(StringUtils.H_PRIVATE_TOKEN, personProperty.getToken()) .header(StringUtils.H_PRIVATE_TOKEN, personProperty.getToken())
.execute(PipelineJson.class) .execute(PipelineJson.class)
.map(json -> { .map(json -> {
final Pipeline pipeline = conversionService.convert(json, Pipeline.class); final Pipeline pipeline = conversionService.convert(json, Pipeline.class);
pipeline.setProject(project); pipeline.setProjectId(projectId);
return pipeline; return pipeline;
}) })
.orElseThrow(convertException("Ошибка обновления Pipelines")); .orElseThrow(convertException("Ошибка обновления Pipelines"));
@ -105,7 +102,7 @@ public class PipelineParser {
} }
pipelineJsons = getPipelineJsons(project.getId(), ++page, lastUpdate); pipelineJsons = getPipelineJsons(projectId, ++page, lastUpdate);
} }
lastUpdate = newLastUpdate; lastUpdate = newLastUpdate;
@ -120,6 +117,7 @@ public class PipelineParser {
} }
public void scanOldPipeline() { public void scanOldPipeline() {
log.debug("Старт обработки старых пайплайнов");
int page = 0; int page = 0;
Page<Pipeline> pipelineSheet = pipelineService.getAllByStatuses(oldStatus, PageRequest.of(page, COUNT)); Page<Pipeline> pipelineSheet = pipelineService.getAllByStatuses(oldStatus, PageRequest.of(page, COUNT));
@ -128,7 +126,7 @@ public class PipelineParser {
for (Pipeline pipeline : pipelines) { for (Pipeline pipeline : pipelines) {
final Pipeline newPipeline = HttpParse.request( final Pipeline newPipeline = HttpParse.request(
MessageFormat.format(gitlabProperty.getUrlPipeline(), pipeline.getProject().getId(), pipeline.getId()) MessageFormat.format(gitlabProperty.getUrlPipeline(), pipeline.getProjectId(), pipeline.getId())
) )
.header(ACCEPT) .header(ACCEPT)
.header(StringUtils.H_PRIVATE_TOKEN, personProperty.getToken()) .header(StringUtils.H_PRIVATE_TOKEN, personProperty.getToken())
@ -141,6 +139,7 @@ public class PipelineParser {
pipelineSheet = pipelineService.getAllByStatuses(oldStatus, PageRequest.of(++page, COUNT)); pipelineSheet = pipelineService.getAllByStatuses(oldStatus, PageRequest.of(++page, COUNT));
} }
log.debug("Конец обработки старых пайплайнов");
} }
} }

View File

@ -46,4 +46,9 @@ public class ProjectRepositoryImpl implements ProjectRepository {
return jpaRepository.findAll(pagination); return jpaRepository.findAll(pagination);
} }
@Override
public Set<Long> findAllIds() {
return jpaRepository.findAllIds();
}
} }

View File

@ -2,10 +2,16 @@ package dev.struchkov.bot.gitlab.data.jpa;
import dev.struchkov.bot.gitlab.context.domain.entity.Project; import dev.struchkov.bot.gitlab.context.domain.entity.Project;
import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import java.util.Set;
/** /**
* @author upagge 14.01.2021 * @author upagge 14.01.2021
*/ */
public interface ProjectJpaRepository extends JpaRepository<Project, Long> { public interface ProjectJpaRepository extends JpaRepository<Project, Long> {
@Query("SELECT p.id FROM Project p")
Set<Long> findAllIds();
} }

View File

@ -24,14 +24,16 @@ public class SchedulerService {
@Scheduled(cron = "0 */1 * * * *") @Scheduled(cron = "0 */1 * * * *")
public void newMergeRequest() { public void newMergeRequest() {
log.debug("Запуск процесса обновления данных");
mergeRequestParser.parsingOldMergeRequest(); mergeRequestParser.parsingOldMergeRequest();
mergeRequestParser.parsingNewMergeRequest(); mergeRequestParser.parsingNewMergeRequest();
pipelineParser.scanOldPipeline(); // pipelineParser.scanOldPipeline();
pipelineParser.scanNewPipeline(); // pipelineParser.scanNewPipeline();
discussionParser.scanOldDiscussions(); // discussionParser.scanOldDiscussions();
discussionParser.scanNewDiscussion(); // discussionParser.scanNewDiscussion();
cleanService.cleanOldPipelines(); // cleanService.cleanOldPipelines();
cleanService.cleanOldMergedRequests(); // cleanService.cleanOldMergedRequests();
log.debug("Конец процесса обновления данных");
} }
} }

View File

@ -1,3 +1,4 @@
spring: spring:
datasource: datasource:
url: ${DATASOURCE_URL} url: ${DATASOURCE_URL}
@ -51,3 +52,6 @@ gitlab-bot:
url-new-note: "${GITLAB_URL}/api/v4/projects/{0,number,#}/merge_requests/{1,number,#}/discussions/{2}/notes?body={3}" url-new-note: "${GITLAB_URL}/api/v4/projects/{0,number,#}/merge_requests/{1,number,#}/discussions/{2}/notes?body={3}"
url-discussion: "${GITLAB_URL}/api/v4/projects/{0,number,#}/merge_requests/{1,number,#}/discussions?&page={2,number,#}&per_page=100" url-discussion: "${GITLAB_URL}/api/v4/projects/{0,number,#}/merge_requests/{1,number,#}/discussions?&page={2,number,#}&per_page=100"
url-one-discussion: "${GITLAB_URL}/api/v4/projects/{0,number,#}/merge_requests/{1,number,#}/discussions/{2}" url-one-discussion: "${GITLAB_URL}/api/v4/projects/{0,number,#}/merge_requests/{1,number,#}/discussions/{2}"
logging:
level:
"dev.struchkov": trace