update 22

2026-01-07 01:14:51 +09:00
parent 57c3eea429
commit 66e8e21302
220 changed files with 2911 additions and 700 deletions

View File

@@ -0,0 +1,39 @@
package research.loghunter.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import org.springframework.web.servlet.resource.PathResourceResolver;
import java.io.IOException;
@Configuration
public class WebConfig implements WebMvcConfigurer {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/**")
.addResourceLocations("classpath:/static/")
.resourceChain(true)
.addResolver(new PathResourceResolver() {
@Override
protected Resource getResource(String resourcePath, Resource location) throws IOException {
Resource requestedResource = location.createRelative(resourcePath);
// Return the file if it actually exists
if (requestedResource.exists() && requestedResource.isReadable()) {
return requestedResource;
}
// For non-API requests, return index.html (SPA routing)
if (!resourcePath.startsWith("api/")) {
return new ClassPathResource("/static/index.html");
}
return null;
}
});
}
}
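The resolver above is what lets client-side routes survive a browser refresh: any request that is neither an existing static file nor an API call is answered with index.html. Below is a minimal sketch (not part of this commit) of that decision rule, assuming the requested paths do not exist under classpath:/static/ and that the paths shown are hypothetical:
// Hypothetical illustration only: mirrors the resolver's fallback rule.
public class SpaFallbackSketch {
    public static void main(String[] args) {
        String[] requestedPaths = {"dashboard", "error-logs/123", "api/error-logs"};
        for (String resourcePath : requestedPaths) {
            // Assumption: no matching file exists on the classpath, so the resolver falls through.
            String served = resourcePath.startsWith("api/")
                    ? "404 (left to the REST controllers)"
                    : "index.html (SPA route)";
            System.out.println(resourcePath + " -> " + served);
        }
    }
}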

View File

@@ -6,9 +6,12 @@ import org.springframework.format.annotation.DateTimeFormat;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import research.loghunter.dto.ErrorLogDto;
import research.loghunter.dto.FileTreeDto;
import research.loghunter.service.ErrorLogService;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/api/error-logs")
@@ -22,6 +25,7 @@ public class ErrorLogController {
@RequestParam(required = false) Long serverId,
@RequestParam(required = false) Long patternId,
@RequestParam(required = false) String severity,
@RequestParam(required = false) String filePath,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startDate,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endDate,
@RequestParam(required = false) String keyword,
@@ -29,7 +33,7 @@ public class ErrorLogController {
@RequestParam(defaultValue = "20") int size
) {
return ResponseEntity.ok(errorLogService.search(
serverId, patternId, severity, startDate, endDate, keyword, page, size));
serverId, patternId, severity, filePath, startDate, endDate, keyword, page, size));
}
@GetMapping("/{id}")
@@ -45,4 +49,46 @@ public class ErrorLogController {
) {
return ResponseEntity.ok(errorLogService.findByServerId(serverId, page, size));
}
/**
* Retrieve tree-structured data (server > log path > file)
*/
@GetMapping("/tree")
public ResponseEntity<List<FileTreeDto.ServerNode>> getFileTree() {
return ResponseEntity.ok(errorLogService.getFileTree());
}
/**
* Retrieve the file list per server
*/
@GetMapping("/files")
public ResponseEntity<List<String>> getFilesByServer(
@RequestParam(required = false) Long serverId
) {
return ResponseEntity.ok(errorLogService.getFilesByServer(serverId));
}
/**
* Delete selected errors
*/
@DeleteMapping("/batch")
public ResponseEntity<Map<String, Object>> deleteByIds(@RequestBody List<Long> ids) {
int deleted = errorLogService.deleteByIds(ids);
return ResponseEntity.ok(Map.of(
"success", true,
"deleted", deleted
));
}
/**
* Delete errors and scan history for a file
*/
@DeleteMapping("/by-file")
public ResponseEntity<Map<String, Object>> deleteByFile(
@RequestParam Long serverId,
@RequestParam String filePath
) {
Map<String, Object> result = errorLogService.deleteFileAndErrors(serverId, filePath);
return ResponseEntity.ok(result);
}
}
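For illustration, a hedged client-side sketch of calling the new batch delete endpoint; the host, port, and IDs are assumptions, and java.net.http is used because it supports a DELETE request with a JSON body:
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical client call: DELETE /api/error-logs/batch with a JSON list of error IDs.
public class BatchDeleteExample {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/error-logs/batch")) // host/port assumed
                .header("Content-Type", "application/json")
                .method("DELETE", HttpRequest.BodyPublishers.ofString("[1, 2, 3]"))
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // e.g. {"success":true,"deleted":3}
    }
}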

View File

@@ -24,12 +24,13 @@ public class ExportController {
@RequestParam(required = false) Long serverId,
@RequestParam(required = false) Long patternId,
@RequestParam(required = false) String severity,
@RequestParam(required = false) String filePath,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startDate,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endDate,
@RequestParam(required = false) String keyword
) {
ExportService.ExportRequest request = new ExportService.ExportRequest(
serverId, patternId, severity, startDate, endDate, keyword);
serverId, patternId, severity, filePath, startDate, endDate, keyword);
ExportService.ExportResult result = exportService.exportHtml(request);
@@ -45,12 +46,13 @@ public class ExportController {
@RequestParam(required = false) Long serverId,
@RequestParam(required = false) Long patternId,
@RequestParam(required = false) String severity,
@RequestParam(required = false) String filePath,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime startDate,
@RequestParam(required = false) @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime endDate,
@RequestParam(required = false) String keyword
) {
ExportService.ExportRequest request = new ExportService.ExportRequest(
serverId, patternId, severity, startDate, endDate, keyword);
serverId, patternId, severity, filePath, startDate, endDate, keyword);
ExportService.ExportResult result = exportService.exportTxt(request);

View File

@@ -11,6 +11,7 @@ import research.loghunter.service.SftpService;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -133,4 +134,88 @@ public class ScanController {
public ResponseEntity<List<ScanHistory>> getHistory(@PathVariable Long serverId) {
return ResponseEntity.ok(scanService.getHistory(serverId));
}
/**
* Reset analysis results (per server)
*/
@DeleteMapping("/reset/{serverId}")
public ResponseEntity<Map<String, Object>> resetScanData(@PathVariable Long serverId) {
ScanService.ResetResult result = scanService.resetScanData(serverId);
return ResponseEntity.ok(Map.of(
"success", true,
"deletedErrors", result.deletedErrors(),
"deletedFiles", result.deletedFiles(),
"deletedHistories", result.deletedHistories()
));
}
/**
* Reset all analysis results
*/
@DeleteMapping("/reset-all")
public ResponseEntity<Map<String, Object>> resetAllScanData() {
ScanService.ResetResult result = scanService.resetAllScanData();
return ResponseEntity.ok(Map.of(
"success", true,
"deletedErrors", result.deletedErrors(),
"deletedFiles", result.deletedFiles(),
"deletedHistories", result.deletedHistories()
));
}
/**
* Retrieve error statistics per file
*/
@GetMapping("/stats/by-file")
public ResponseEntity<List<ScanService.FileErrorStats>> getErrorStatsByFile(
@RequestParam(required = false) Long serverId) {
return ResponseEntity.ok(scanService.getErrorStatsByFile(serverId));
}
/**
* Retrieve error statistics per server
*/
@GetMapping("/stats/by-server")
public ResponseEntity<List<ScanService.ServerErrorStats>> getErrorStatsByServer() {
return ResponseEntity.ok(scanService.getErrorStatsByServer());
}
/**
* Retrieve error statistics per pattern
*/
@GetMapping("/stats/by-pattern")
public ResponseEntity<List<ScanService.PatternErrorStats>> getErrorStatsByPattern(
@RequestParam(required = false) Long serverId) {
return ResponseEntity.ok(scanService.getErrorStatsByPattern(serverId));
}
/**
* For the dashboard: daily statistics per server for the last N days
*/
@GetMapping("/stats/daily-by-server")
public ResponseEntity<List<ScanService.ServerDailyStats>> getDailyStatsByServer(
@RequestParam(defaultValue = "30") int days) {
return ResponseEntity.ok(scanService.getDailyStatsByServer(days));
}
/**
* For the monthly view: daily statistics per server for the given month
*/
@GetMapping("/stats/monthly-by-server")
public ResponseEntity<List<ScanService.ServerDailyStats>> getMonthlyStatsByServer(
@RequestParam int year,
@RequestParam int month) {
return ResponseEntity.ok(scanService.getMonthlyStatsByServer(year, month));
}
/**
* For the daily view: per-server statistics for the given date in 5-minute intervals
*/
@GetMapping("/stats/time-by-server")
public ResponseEntity<List<ScanService.ServerTimeStats>> getTimeStatsByServer(
@RequestParam String date,
@RequestParam(defaultValue = "5") int intervalMinutes) {
java.time.LocalDate localDate = java.time.LocalDate.parse(date);
return ResponseEntity.ok(scanService.getTimeStatsByServer(localDate, intervalMinutes));
}
}

View File

@@ -0,0 +1,45 @@
package research.loghunter.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
public class FileTreeDto {
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class ServerNode {
private Long serverId;
private String serverName;
private int totalErrorCount;
private List<PathNode> paths;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class PathNode {
private String path;
private int totalErrorCount;
private List<FileNode> files;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class FileNode {
private String filePath;
private String fileName;
private int errorCount;
private int criticalCount;
private int errorLevelCount;
private int warnCount;
}
}
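A hedged usage sketch of the DTO above, showing how the service layer assembles one server branch of the tree (all values are illustrative, not from the commit):
import java.util.List;
import research.loghunter.dto.FileTreeDto;

// Illustrative only: builds server > path > file with Lombok-generated builders.
public class FileTreeSketch {
    public static void main(String[] args) {
        FileTreeDto.FileNode file = FileTreeDto.FileNode.builder()
                .filePath("/logs/app/app.log")
                .fileName("app.log")
                .errorCount(12)
                .criticalCount(1)
                .errorLevelCount(8)
                .warnCount(3)
                .build();
        FileTreeDto.PathNode path = FileTreeDto.PathNode.builder()
                .path("/logs/app")
                .totalErrorCount(12)
                .files(List.of(file))
                .build();
        FileTreeDto.ServerNode server = FileTreeDto.ServerNode.builder()
                .serverId(1L)
                .serverName("web-01")
                .totalErrorCount(12)
                .paths(List.of(path))
                .build();
        System.out.println(server); // @Data supplies toString for quick inspection
    }
}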

View File

@@ -12,6 +12,7 @@ public class PatternDto {
private Long id;
private String name;
private String regex;
private String excludeRegex; // exclusion regex
private String severity;
private Integer contextLines;
private String description;

View File

@@ -23,6 +23,9 @@ public class Pattern {
@Column(nullable = false, length = 1000)
private String regex; // regular expression
@Column(length = 1000)
private String excludeRegex; // exclusion regex (matching lines are excluded from errors)
@Column(nullable = false)
private String severity; // CRITICAL, ERROR, WARN

View File

@@ -3,6 +3,7 @@ package research.loghunter.repository;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import research.loghunter.entity.ErrorLog;
@@ -20,6 +21,7 @@ public interface ErrorLogRepository extends JpaRepository<ErrorLog, Long> {
"(:serverId IS NULL OR e.server.id = :serverId) AND " +
"(:patternId IS NULL OR e.pattern.id = :patternId) AND " +
"(:severity IS NULL OR e.severity = :severity) AND " +
"(:filePath IS NULL OR e.filePath = :filePath) AND " +
"(:startDate IS NULL OR e.occurredAt >= :startDate) AND " +
"(:endDate IS NULL OR e.occurredAt <= :endDate) AND " +
"(:keyword IS NULL OR LOWER(e.summary) LIKE LOWER(CONCAT('%', :keyword, '%')) OR LOWER(e.context) LIKE LOWER(CONCAT('%', :keyword, '%'))) " +
@@ -28,8 +30,94 @@ public interface ErrorLogRepository extends JpaRepository<ErrorLog, Long> {
@Param("serverId") Long serverId,
@Param("patternId") Long patternId,
@Param("severity") String severity,
@Param("filePath") String filePath,
@Param("startDate") LocalDateTime startDate,
@Param("endDate") LocalDateTime endDate,
@Param("keyword") String keyword,
Pageable pageable);
// Delete by server
void deleteByServerId(Long serverId);
// Error count per server
long countByServerId(Long serverId);
// All file paths (deduplicated)
@Query("SELECT DISTINCT e.filePath FROM ErrorLog e ORDER BY e.filePath")
List<String> findDistinctFilePaths();
// File paths per server (deduplicated)
@Query("SELECT DISTINCT e.filePath FROM ErrorLog e WHERE e.server.id = :serverId ORDER BY e.filePath")
List<String> findDistinctFilePathsByServerId(@Param("serverId") Long serverId);
// Delete by ID list
@Modifying
@Query("DELETE FROM ErrorLog e WHERE e.id IN :ids")
int deleteByIdIn(@Param("ids") List<Long> ids);
// Delete by server + file
@Modifying
@Query("DELETE FROM ErrorLog e WHERE e.server.id = :serverId AND e.filePath = :filePath")
int deleteByServerIdAndFilePath(@Param("serverId") Long serverId, @Param("filePath") String filePath);
// Error statistics per file
@Query("SELECT e.filePath, e.server.name, COUNT(e), " +
"SUM(CASE WHEN e.severity = 'CRITICAL' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'ERROR' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'WARN' THEN 1 ELSE 0 END), " +
"MAX(e.occurredAt) " +
"FROM ErrorLog e " +
"WHERE (:serverId IS NULL OR e.server.id = :serverId) " +
"GROUP BY e.filePath, e.server.name " +
"ORDER BY COUNT(e) DESC")
List<Object[]> getErrorStatsByFile(@Param("serverId") Long serverId);
// Error statistics per server
@Query("SELECT e.server.id, e.server.name, COUNT(e), " +
"SUM(CASE WHEN e.severity = 'CRITICAL' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'ERROR' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'WARN' THEN 1 ELSE 0 END), " +
"MAX(e.occurredAt) " +
"FROM ErrorLog e " +
"GROUP BY e.server.id, e.server.name " +
"ORDER BY COUNT(e) DESC")
List<Object[]> getErrorStatsByServer();
// Error statistics per pattern
@Query("SELECT e.pattern.id, e.pattern.name, e.pattern.severity, COUNT(e), MAX(e.occurredAt) " +
"FROM ErrorLog e " +
"WHERE (:serverId IS NULL OR e.server.id = :serverId) " +
"GROUP BY e.pattern.id, e.pattern.name, e.pattern.severity " +
"ORDER BY COUNT(e) DESC")
List<Object[]> getErrorStatsByPattern(@Param("serverId") Long serverId);
// Per-file error statistics for the tree (server ID, server name, file path, error count, CRITICAL, ERROR, WARN)
@Query("SELECT e.server.id, e.server.name, e.filePath, COUNT(e), " +
"SUM(CASE WHEN e.severity = 'CRITICAL' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'ERROR' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'WARN' THEN 1 ELSE 0 END) " +
"FROM ErrorLog e " +
"GROUP BY e.server.id, e.server.name, e.filePath " +
"ORDER BY e.server.name, e.filePath")
List<Object[]> getFileErrorStats();
// Daily error statistics per server (for the dashboard and monthly view) - SQLite native query (supports epoch ms)
@Query(value = "SELECT e.server_id, s.name, date(e.occurred_at/1000, 'unixepoch', 'localtime'), COUNT(*), " +
"SUM(CASE WHEN e.severity = 'CRITICAL' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'ERROR' THEN 1 ELSE 0 END), " +
"SUM(CASE WHEN e.severity = 'WARN' THEN 1 ELSE 0 END) " +
"FROM error_logs e JOIN servers s ON e.server_id = s.id " +
"WHERE e.occurred_at >= :startMs AND e.occurred_at < :endMs " +
"GROUP BY e.server_id, s.name, date(e.occurred_at/1000, 'unixepoch', 'localtime') " +
"ORDER BY e.server_id, date(e.occurred_at/1000, 'unixepoch', 'localtime')", nativeQuery = true)
List<Object[]> getDailyStatsByServer(@Param("startMs") long startMs,
@Param("endMs") long endMs);
// Per-server time-based error statistics (for the daily view - raw rows; 5-minute bucketing is done in the service)
@Query("SELECT e.server.id, e.server.name, e.occurredAt, e.severity " +
"FROM ErrorLog e " +
"WHERE e.occurredAt >= :startDate AND e.occurredAt < :endDate " +
"ORDER BY e.server.id, e.occurredAt")
List<Object[]> getErrorsByDateRange(@Param("startDate") LocalDateTime startDate,
@Param("endDate") LocalDateTime endDate);
}
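getDailyStatsByServer is a native query because occurred_at is persisted as epoch milliseconds; the SQLite expression date(occurred_at/1000, 'unixepoch', 'localtime') turns that into a local calendar day. A small sketch of the equivalent conversion on the Java side (the timestamp value is a placeholder):
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;

// Sketch: Java-side equivalent of SQLite's date(occurred_at/1000, 'unixepoch', 'localtime').
public class EpochMsToLocalDate {
    public static void main(String[] args) {
        long occurredAtMs = System.currentTimeMillis(); // placeholder for a stored occurred_at value
        LocalDate day = Instant.ofEpochMilli(occurredAtMs)
                .atZone(ZoneId.systemDefault())
                .toLocalDate();
        System.out.println(day); // the calendar day the native query groups by
    }
}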

View File

@@ -1,6 +1,7 @@
package research.loghunter.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import research.loghunter.entity.ScannedFile;
@@ -28,4 +29,14 @@ public interface ScannedFileRepository extends JpaRepository<ScannedFile, Long>
// Scanned file count per path
long countByLogPathId(Long logPathId);
// Delete by server + file path
@Modifying
@Query("DELETE FROM ScannedFile sf WHERE sf.serverId = :serverId AND sf.filePath = :filePath")
int deleteByServerIdAndFilePath(@Param("serverId") Long serverId, @Param("filePath") String filePath);
// Delete all for a server
@Modifying
@Query("DELETE FROM ScannedFile sf WHERE sf.serverId = :serverId")
int deleteByServerId(@Param("serverId") Long serverId);
}

View File

@@ -7,10 +7,16 @@ import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import research.loghunter.dto.ErrorLogDto;
import research.loghunter.dto.FileTreeDto;
import research.loghunter.entity.ErrorLog;
import research.loghunter.entity.Server;
import research.loghunter.repository.ErrorLogRepository;
import research.loghunter.repository.ScannedFileRepository;
import research.loghunter.repository.ServerRepository;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
@@ -18,11 +24,14 @@ import java.time.LocalDateTime;
public class ErrorLogService {
private final ErrorLogRepository errorLogRepository;
private final ScannedFileRepository scannedFileRepository;
private final ServerRepository serverRepository;
public Page<ErrorLogDto> search(
Long serverId,
Long patternId,
String severity,
String filePath,
LocalDateTime startDate,
LocalDateTime endDate,
String keyword,
@@ -31,7 +40,7 @@ public class ErrorLogService {
) {
Pageable pageable = PageRequest.of(page, size);
Page<ErrorLog> errorLogs = errorLogRepository.searchErrors(
serverId, patternId, severity, startDate, endDate, keyword, pageable);
serverId, patternId, severity, filePath, startDate, endDate, keyword, pageable);
return errorLogs.map(this::toDto);
}
@@ -48,6 +57,125 @@ public class ErrorLogService {
.orElseThrow(() -> new RuntimeException("ErrorLog not found: " + id));
}
/**
* Retrieve tree-structured data
*/
public List<FileTreeDto.ServerNode> getFileTree() {
// Fetch per-file error statistics
List<Object[]> stats = errorLogRepository.getFileErrorStats();
// Group by server
Map<Long, List<Object[]>> serverGroups = new LinkedHashMap<>();
for (Object[] stat : stats) {
Long serverId = ((Number) stat[0]).longValue();
serverGroups.computeIfAbsent(serverId, k -> new ArrayList<>()).add(stat);
}
List<FileTreeDto.ServerNode> result = new ArrayList<>();
for (Map.Entry<Long, List<Object[]>> entry : serverGroups.entrySet()) {
Long serverId = entry.getKey();
List<Object[]> serverStats = entry.getValue();
String serverName = (String) serverStats.get(0)[1];
// Group by path
Map<String, List<FileTreeDto.FileNode>> pathGroups = new LinkedHashMap<>();
int serverTotalErrors = 0;
for (Object[] stat : serverStats) {
String filePath = (String) stat[2];
int errorCount = ((Number) stat[3]).intValue();
int criticalCount = ((Number) stat[4]).intValue();
int errorLevelCount = ((Number) stat[5]).intValue();
int warnCount = ((Number) stat[6]).intValue();
// Split into path and file name
int lastSlash = filePath.lastIndexOf('/');
String path = lastSlash > 0 ? filePath.substring(0, lastSlash) : "/";
String fileName = lastSlash > 0 ? filePath.substring(lastSlash + 1) : filePath;
FileTreeDto.FileNode fileNode = FileTreeDto.FileNode.builder()
.filePath(filePath)
.fileName(fileName)
.errorCount(errorCount)
.criticalCount(criticalCount)
.errorLevelCount(errorLevelCount)
.warnCount(warnCount)
.build();
pathGroups.computeIfAbsent(path, k -> new ArrayList<>()).add(fileNode);
serverTotalErrors += errorCount;
}
// Build PathNodes
List<FileTreeDto.PathNode> pathNodes = new ArrayList<>();
for (Map.Entry<String, List<FileTreeDto.FileNode>> pathEntry : pathGroups.entrySet()) {
List<FileTreeDto.FileNode> files = pathEntry.getValue();
int pathTotalErrors = files.stream().mapToInt(FileTreeDto.FileNode::getErrorCount).sum();
pathNodes.add(FileTreeDto.PathNode.builder()
.path(pathEntry.getKey())
.totalErrorCount(pathTotalErrors)
.files(files)
.build());
}
result.add(FileTreeDto.ServerNode.builder()
.serverId(serverId)
.serverName(serverName)
.totalErrorCount(serverTotalErrors)
.paths(pathNodes)
.build());
}
return result;
}
/**
* Retrieve the file list per server
*/
public List<String> getFilesByServer(Long serverId) {
if (serverId == null) {
return errorLogRepository.findDistinctFilePaths();
}
return errorLogRepository.findDistinctFilePathsByServerId(serverId);
}
/**
* Delete errors with the selected IDs
*/
@Transactional
public int deleteByIds(List<Long> ids) {
if (ids == null || ids.isEmpty()) {
return 0;
}
return errorLogRepository.deleteByIdIn(ids);
}
/**
* Delete a file (error logs + scan history)
*/
@Transactional
public Map<String, Object> deleteFileAndErrors(Long serverId, String filePath) {
int deletedErrors = errorLogRepository.deleteByServerIdAndFilePath(serverId, filePath);
int deletedFiles = scannedFileRepository.deleteByServerIdAndFilePath(serverId, filePath);
return Map.of(
"success", true,
"deletedErrors", deletedErrors,
"deletedScannedFiles", deletedFiles
);
}
/**
* Delete errors by server + file
*/
@Transactional
public int deleteByServerAndFile(Long serverId, String filePath) {
return errorLogRepository.deleteByServerIdAndFilePath(serverId, filePath);
}
private ErrorLogDto toDto(ErrorLog errorLog) {
return ErrorLogDto.builder()
.id(errorLog.getId())

View File

@@ -141,6 +141,7 @@ public class ExportService {
request.serverId(),
request.patternId(),
request.severity(),
request.filePath(),
request.startDate(),
request.endDate(),
request.keyword(),
@@ -272,6 +273,7 @@ public class ExportService {
Long serverId,
Long patternId,
String severity,
String filePath,
LocalDateTime startDate,
LocalDateTime endDate,
String keyword

View File

@@ -14,6 +14,18 @@ import java.util.regex.Matcher;
@Slf4j
public class LogParserService {
// Log level detection pattern (INFO, DEBUG, TRACE are not errors)
private static final java.util.regex.Pattern LOG_LEVEL_PATTERN = java.util.regex.Pattern.compile(
"\\b(INFO|DEBUG|TRACE|FINE|FINER|FINEST|CONFIG)\\b",
java.util.regex.Pattern.CASE_INSENSITIVE
);
// Error level pattern (ERROR, WARN, FATAL, etc.)
private static final java.util.regex.Pattern ERROR_LEVEL_PATTERN = java.util.regex.Pattern.compile(
"\\b(ERROR|WARN|WARNING|FATAL|SEVERE|CRITICAL)\\b",
java.util.regex.Pattern.CASE_INSENSITIVE
);
/**
* Match patterns against log content
*/
@@ -27,13 +39,37 @@ public class LogParserService {
try {
java.util.regex.Pattern compiledPattern = java.util.regex.Pattern.compile(pattern.getRegex());
// Compile the exclude pattern (if present)
java.util.regex.Pattern excludePattern = null;
if (pattern.getExcludeRegex() != null && !pattern.getExcludeRegex().isBlank()) {
try {
excludePattern = java.util.regex.Pattern.compile(pattern.getExcludeRegex());
} catch (Exception e) {
log.warn("Invalid exclude regex for pattern {}: {}", pattern.getName(), e.getMessage());
}
}
for (int i = 0; i < lines.length; i++) {
Matcher matcher = compiledPattern.matcher(lines[i]);
String line = lines[i];
Matcher matcher = compiledPattern.matcher(line);
if (matcher.find()) {
// 1. Check the exclude pattern
if (excludePattern != null && excludePattern.matcher(line).find()) {
log.debug("Excluded by excludeRegex: {}", line.substring(0, Math.min(100, line.length())));
continue;
}
// 2. Check the log level - INFO/DEBUG/TRACE are not errors
if (isNonErrorLogLevel(line)) {
log.debug("Excluded by log level (INFO/DEBUG/TRACE): {}",
line.substring(0, Math.min(100, line.length())));
continue;
}
// Extract context
String context = extractContext(lines, i, pattern.getContextLines());
String summary = createSummary(lines[i]);
String summary = createSummary(line);
results.add(new MatchResult(
pattern,
@@ -53,6 +89,26 @@ public class LogParserService {
return results;
}
/**
* Check whether the log level is INFO/DEBUG/TRACE
* - Returns false if an error level (ERROR/WARN, etc.) is present
* - Returns true if INFO/DEBUG/TRACE is present (not an error)
*/
private boolean isNonErrorLogLevel(String line) {
// An error level means it is an error (return false)
if (ERROR_LEVEL_PATTERN.matcher(line).find()) {
return false;
}
// INFO/DEBUG/TRACE means it is not an error (return true)
if (LOG_LEVEL_PATTERN.matcher(line).find()) {
return true;
}
// No level information: fall back to the pattern match (treat as an error)
return false;
}
/**
* Extract context (lines before and after the error)
*/
@@ -70,12 +126,32 @@ public class LogParserService {
return context.toString();
}
// Date/time prefix patterns stripped from the summary
private static final java.util.regex.Pattern DATETIME_PREFIX_PATTERN = java.util.regex.Pattern.compile(
"^\\s*" +
"(" +
"\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}[,.]?\\d{0,3}" + // 2026-01-06 10:35:23,456
"|\\d{2}-[A-Za-z]{3}-\\d{4} \\d{2}:\\d{2}:\\d{2}" + // 06-Jan-2026 10:35:23
"|[A-Za-z]{3} \\d{2},? \\d{4} \\d{2}:\\d{2}:\\d{2}" + // Jan 06, 2026 10:35:23
"|[A-Za-z]{3} \\d{2} \\d{2}:\\d{2}:\\d{2}" + // Jan 06 10:35:23
"|\\[\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}[,.]?\\d{0,3}\\]" + // [2026-01-06 10:35:23]
")" +
"\\s*"
);
/**
* Create a summary (first line, up to 200 characters)
* Create a summary (first line, up to 200 characters, leading date/time removed)
*/
private String createSummary(String line) {
if (line == null) return "";
String trimmed = line.trim();
// Strip the leading date/time
Matcher dtMatcher = DATETIME_PREFIX_PATTERN.matcher(trimmed);
if (dtMatcher.find()) {
trimmed = trimmed.substring(dtMatcher.end()).trim();
}
if (trimmed.length() <= 200) return trimmed;
return trimmed.substring(0, 200) + "...";
}
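To make the new filtering concrete, here is a small standalone sketch (not part of the commit): the level patterns are copied from above, the date/time prefix is reduced to its first alternative, and the sample log lines are invented. It shows that an ERROR line is kept while an INFO line is skipped even when the main pattern would match it, and that the summary loses its timestamp prefix:
import java.util.regex.Pattern;

// Illustrative only: demonstrates the log-level check and date/time prefix stripping above.
public class LogLevelFilterDemo {
    private static final Pattern NON_ERROR_LEVEL = Pattern.compile(
            "\\b(INFO|DEBUG|TRACE|FINE|FINER|FINEST|CONFIG)\\b", Pattern.CASE_INSENSITIVE);
    private static final Pattern ERROR_LEVEL = Pattern.compile(
            "\\b(ERROR|WARN|WARNING|FATAL|SEVERE|CRITICAL)\\b", Pattern.CASE_INSENSITIVE);
    private static final Pattern DATETIME_PREFIX = Pattern.compile(
            "^\\s*\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}[,.]?\\d{0,3}\\s*"); // simplified

    public static void main(String[] args) {
        String[] lines = {
            "2026-01-06 10:35:23,456 ERROR Connection refused",        // kept: error level wins
            "2026-01-06 10:35:24,001 INFO Exception handler registered" // skipped: INFO, no error level
        };
        for (String line : lines) {
            boolean nonError = !ERROR_LEVEL.matcher(line).find() && NON_ERROR_LEVEL.matcher(line).find();
            String summary = DATETIME_PREFIX.matcher(line).replaceFirst("");
            System.out.println((nonError ? "skip: " : "keep: ") + summary);
        }
    }
}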

View File

@@ -39,10 +39,14 @@ public class PatternService {
@Transactional
public PatternDto create(PatternDto dto) {
validateRegex(dto.getRegex());
if (dto.getExcludeRegex() != null && !dto.getExcludeRegex().isBlank()) {
validateRegex(dto.getExcludeRegex());
}
Pattern pattern = Pattern.builder()
.name(dto.getName())
.regex(dto.getRegex())
.excludeRegex(dto.getExcludeRegex())
.severity(dto.getSeverity() != null ? dto.getSeverity() : "ERROR")
.contextLines(dto.getContextLines() != null ? dto.getContextLines() : 5)
.description(dto.getDescription())
@@ -56,12 +60,16 @@ public class PatternService {
@Transactional
public PatternDto update(Long id, PatternDto dto) {
validateRegex(dto.getRegex());
if (dto.getExcludeRegex() != null && !dto.getExcludeRegex().isBlank()) {
validateRegex(dto.getExcludeRegex());
}
Pattern pattern = patternRepository.findById(id)
.orElseThrow(() -> new RuntimeException("Pattern not found: " + id));
pattern.setName(dto.getName());
pattern.setRegex(dto.getRegex());
pattern.setExcludeRegex(dto.getExcludeRegex());
pattern.setSeverity(dto.getSeverity());
pattern.setContextLines(dto.getContextLines());
pattern.setDescription(dto.getDescription());
@@ -107,6 +115,7 @@ public class PatternService {
.id(pattern.getId())
.name(pattern.getName())
.regex(pattern.getRegex())
.excludeRegex(pattern.getExcludeRegex())
.severity(pattern.getSeverity())
.contextLines(pattern.getContextLines())
.description(pattern.getDescription())

View File

@@ -7,17 +7,17 @@ import org.springframework.transaction.annotation.Transactional;
import research.loghunter.entity.*;
import research.loghunter.repository.*;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
@@ -450,4 +450,339 @@ public class ScanService {
int errorsFound,
String error
) {}
// === Reset analysis results ===
/**
* Reset analysis results per server
*/
@Transactional
public ResetResult resetScanData(Long serverId) {
long deletedErrors = errorLogRepository.countByServerId(serverId);
errorLogRepository.deleteByServerId(serverId);
List<ScannedFile> scannedFiles = scannedFileRepository.findByServerId(serverId);
int deletedFiles = scannedFiles.size();
scannedFileRepository.deleteAll(scannedFiles);
List<ScanHistory> histories = scanHistoryRepository.findByServerIdOrderByStartedAtDesc(serverId);
int deletedHistories = histories.size();
scanHistoryRepository.deleteAll(histories);
log.info("Reset scan data for server {}: {} errors, {} files, {} histories",
serverId, deletedErrors, deletedFiles, deletedHistories);
return new ResetResult((int) deletedErrors, deletedFiles, deletedHistories);
}
/**
* Reset all analysis results
*/
@Transactional
public ResetResult resetAllScanData() {
long deletedErrors = errorLogRepository.count();
errorLogRepository.deleteAll();
long deletedFiles = scannedFileRepository.count();
scannedFileRepository.deleteAll();
long deletedHistories = scanHistoryRepository.count();
scanHistoryRepository.deleteAll();
log.info("Reset all scan data: {} errors, {} files, {} histories",
deletedErrors, deletedFiles, deletedHistories);
return new ResetResult((int) deletedErrors, (int) deletedFiles, (int) deletedHistories);
}
// === Error statistics ===
/**
* Error statistics per file
*/
public List<FileErrorStats> getErrorStatsByFile(Long serverId) {
List<Object[]> results = errorLogRepository.getErrorStatsByFile(serverId);
return results.stream()
.map(row -> new FileErrorStats(
(String) row[0], // filePath
(String) row[1], // serverName
((Number) row[2]).intValue(), // totalCount
((Number) row[3]).intValue(), // criticalCount
((Number) row[4]).intValue(), // errorCount
((Number) row[5]).intValue(), // warnCount
(LocalDateTime) row[6] // lastOccurredAt
))
.toList();
}
/**
* Error statistics per server
*/
public List<ServerErrorStats> getErrorStatsByServer() {
List<Object[]> results = errorLogRepository.getErrorStatsByServer();
return results.stream()
.map(row -> new ServerErrorStats(
((Number) row[0]).longValue(), // serverId
(String) row[1], // serverName
((Number) row[2]).intValue(), // totalCount
((Number) row[3]).intValue(), // criticalCount
((Number) row[4]).intValue(), // errorCount
((Number) row[5]).intValue(), // warnCount
(LocalDateTime) row[6] // lastOccurredAt
))
.toList();
}
/**
* Error statistics per pattern
*/
public List<PatternErrorStats> getErrorStatsByPattern(Long serverId) {
List<Object[]> results = errorLogRepository.getErrorStatsByPattern(serverId);
return results.stream()
.map(row -> new PatternErrorStats(
((Number) row[0]).longValue(), // patternId
(String) row[1], // patternName
(String) row[2], // severity
((Number) row[3]).intValue(), // count
(LocalDateTime) row[4] // lastOccurredAt
))
.toList();
}
/**
* For the dashboard: daily statistics per server for the last N days
*/
public List<ServerDailyStats> getDailyStatsByServer(int days) {
LocalDateTime endDate = LocalDate.now().plusDays(1).atStartOfDay(); // tomorrow 00:00
LocalDateTime startDate = LocalDate.now().minusDays(days - 1).atStartOfDay(); // 00:00, N days back
// Convert to epoch milliseconds
long startMs = startDate.atZone(java.time.ZoneId.systemDefault()).toInstant().toEpochMilli();
long endMs = endDate.atZone(java.time.ZoneId.systemDefault()).toInstant().toEpochMilli();
List<Object[]> results = errorLogRepository.getDailyStatsByServer(startMs, endMs);
// Group by server
Map<Long, ServerDailyStats> serverMap = new LinkedHashMap<>();
for (Object[] row : results) {
Long serverId = ((Number) row[0]).longValue();
String serverName = (String) row[1];
String dateStr = (String) row[2]; // SQLite date() returns a String
int total = ((Number) row[3]).intValue();
int critical = ((Number) row[4]).intValue();
int error = ((Number) row[5]).intValue();
int warn = ((Number) row[6]).intValue();
serverMap.computeIfAbsent(serverId, k -> new ServerDailyStats(serverId, serverName, new ArrayList<>()))
.dailyStats().add(new DailyStat(dateStr, total, critical, error, warn));
}
// Fill in every date (dates without data become 0)
List<String> allDates = new ArrayList<>();
for (int i = days - 1; i >= 0; i--) {
allDates.add(LocalDate.now().minusDays(i).toString());
}
for (ServerDailyStats server : serverMap.values()) {
Map<String, DailyStat> dateMap = server.dailyStats().stream()
.collect(Collectors.toMap(DailyStat::date, s -> s));
List<DailyStat> filledStats = allDates.stream()
.map(date -> dateMap.getOrDefault(date, new DailyStat(date, 0, 0, 0, 0)))
.toList();
server.dailyStats().clear();
server.dailyStats().addAll(filledStats);
}
return new ArrayList<>(serverMap.values());
}
/**
* For the monthly view: daily statistics per server for the given month
*/
public List<ServerDailyStats> getMonthlyStatsByServer(int year, int month) {
LocalDate firstDay = LocalDate.of(year, month, 1);
LocalDate lastDay = firstDay.withDayOfMonth(firstDay.lengthOfMonth());
LocalDateTime startDate = firstDay.atStartOfDay();
LocalDateTime endDate = lastDay.plusDays(1).atStartOfDay();
// Convert to epoch milliseconds
long startMs = startDate.atZone(java.time.ZoneId.systemDefault()).toInstant().toEpochMilli();
long endMs = endDate.atZone(java.time.ZoneId.systemDefault()).toInstant().toEpochMilli();
log.info("Monthly stats query: year={}, month={}, startMs={}, endMs={}", year, month, startMs, endMs);
List<Object[]> results = errorLogRepository.getDailyStatsByServer(startMs, endMs);
log.info("Monthly stats results count: {}", results.size());
// Group by server
Map<Long, ServerDailyStats> serverMap = new LinkedHashMap<>();
for (Object[] row : results) {
Long serverId = ((Number) row[0]).longValue();
String serverName = (String) row[1];
String dateStr = (String) row[2]; // SQLite date() returns a String
int total = ((Number) row[3]).intValue();
int critical = ((Number) row[4]).intValue();
int error = ((Number) row[5]).intValue();
int warn = ((Number) row[6]).intValue();
log.debug("Row: serverId={}, serverName={}, date={}, total={}", serverId, serverName, dateStr, total);
serverMap.computeIfAbsent(serverId, k -> new ServerDailyStats(serverId, serverName, new ArrayList<>()))
.dailyStats().add(new DailyStat(dateStr, total, critical, error, warn));
}
// Fill in every date
List<String> allDates = new ArrayList<>();
for (int day = 1; day <= firstDay.lengthOfMonth(); day++) {
allDates.add(LocalDate.of(year, month, day).toString());
}
for (ServerDailyStats server : serverMap.values()) {
Map<String, DailyStat> dateMap = server.dailyStats().stream()
.collect(Collectors.toMap(DailyStat::date, s -> s));
List<DailyStat> filledStats = allDates.stream()
.map(date -> dateMap.getOrDefault(date, new DailyStat(date, 0, 0, 0, 0)))
.toList();
server.dailyStats().clear();
server.dailyStats().addAll(filledStats);
}
return new ArrayList<>(serverMap.values());
}
/**
* For the daily view: per-server statistics for the given date in 5-minute intervals
*/
public List<ServerTimeStats> getTimeStatsByServer(LocalDate date, int intervalMinutes) {
LocalDateTime startDate = date.atStartOfDay();
LocalDateTime endDate = date.plusDays(1).atStartOfDay();
List<Object[]> results = errorLogRepository.getErrorsByDateRange(startDate, endDate);
// Group by server
Map<Long, ServerTimeStats> serverMap = new LinkedHashMap<>();
// Initialize time slots (5-minute intervals = 288 slots)
int slots = 24 * 60 / intervalMinutes;
for (Object[] row : results) {
Long serverId = ((Number) row[0]).longValue();
String serverName = (String) row[1];
LocalDateTime occurredAt = (LocalDateTime) row[2];
String severity = (String) row[3];
ServerTimeStats stats = serverMap.computeIfAbsent(serverId,
k -> new ServerTimeStats(serverId, serverName, initTimeSlots(slots, intervalMinutes)));
// Find the matching time slot
int minuteOfDay = occurredAt.getHour() * 60 + occurredAt.getMinute();
int slotIndex = minuteOfDay / intervalMinutes;
if (slotIndex < stats.timeStats().size()) {
TimeStat slot = stats.timeStats().get(slotIndex);
slot.incrementTotal();
switch (severity) {
case "CRITICAL" -> slot.incrementCritical();
case "ERROR" -> slot.incrementError();
case "WARN" -> slot.incrementWarn();
}
}
}
return new ArrayList<>(serverMap.values());
}
private List<TimeStat> initTimeSlots(int slots, int intervalMinutes) {
List<TimeStat> timeStats = new ArrayList<>();
for (int i = 0; i < slots; i++) {
int minutes = i * intervalMinutes;
String time = String.format("%02d:%02d", minutes / 60, minutes % 60);
timeStats.add(new TimeStat(time));
}
return timeStats;
}
// DTOs for stats
public record ResetResult(int deletedErrors, int deletedFiles, int deletedHistories) {}
public record FileErrorStats(
String filePath,
String serverName,
int totalCount,
int criticalCount,
int errorCount,
int warnCount,
LocalDateTime lastOccurredAt
) {}
public record ServerErrorStats(
Long serverId,
String serverName,
int totalCount,
int criticalCount,
int errorCount,
int warnCount,
LocalDateTime lastOccurredAt
) {}
public record PatternErrorStats(
Long patternId,
String patternName,
String severity,
int count,
LocalDateTime lastOccurredAt
) {}
// Daily statistics DTOs
public record ServerDailyStats(
Long serverId,
String serverName,
List<DailyStat> dailyStats
) {}
public record DailyStat(
String date,
int total,
int critical,
int error,
int warn
) {}
// Time-based statistics DTOs
public record ServerTimeStats(
Long serverId,
String serverName,
List<TimeStat> timeStats
) {}
public static class TimeStat {
private final String time;
private int total;
private int critical;
private int error;
private int warn;
public TimeStat(String time) {
this.time = time;
}
public String getTime() { return time; }
public int getTotal() { return total; }
public int getCritical() { return critical; }
public int getError() { return error; }
public int getWarn() { return warn; }
public void incrementTotal() { total++; }
public void incrementCritical() { critical++; }
public void incrementError() { error++; }
public void incrementWarn() { warn++; }
}
}
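The 5-minute bucketing in getTimeStatsByServer and initTimeSlots reduces to simple integer arithmetic; a minimal sketch with a hypothetical event time:
import java.time.LocalTime;

// Sketch: with intervalMinutes = 5 there are 24*60/5 = 288 slots; an event at 10:37 lands in slot 127, labelled "10:35".
public class TimeSlotDemo {
    public static void main(String[] args) {
        int intervalMinutes = 5;
        LocalTime occurredAt = LocalTime.of(10, 37); // hypothetical event time
        int minuteOfDay = occurredAt.getHour() * 60 + occurredAt.getMinute();
        int slotIndex = minuteOfDay / intervalMinutes;
        int slotStart = slotIndex * intervalMinutes;
        String label = String.format("%02d:%02d", slotStart / 60, slotStart % 60);
        System.out.println(slotIndex + " -> " + label); // prints 127 -> 10:35
    }
}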

View File

@@ -190,14 +190,20 @@ public class SftpService {
return detail.toString();
}
// Maximum age of files to analyze (1 month)
private static final int MAX_FILE_AGE_DAYS = 31;
/**
* List all files in the given path (with pattern matching)
* List all files in the given path (with pattern matching, only files from the last month)
*/
public List<RemoteFile> listAllFiles(Server server, ServerLogPath logPath) {
List<RemoteFile> files = new ArrayList<>();
Session session = null;
ChannelSftp channel = null;
// Cutoff time: one month ago
LocalDateTime cutoffTime = LocalDateTime.now().minusDays(MAX_FILE_AGE_DAYS);
try {
session = createSession(server);
session.connect(CONNECT_TIMEOUT);
@@ -212,6 +218,7 @@ public class SftpService {
@SuppressWarnings("unchecked")
Vector<ChannelSftp.LsEntry> entries = channel.ls(path);
int skippedOldFiles = 0;
for (ChannelSftp.LsEntry entry : entries) {
if (entry.getAttrs().isDir()) continue;
@@ -222,6 +229,12 @@ public class SftpService {
LocalDateTime fileTime = LocalDateTime.ofInstant(
Instant.ofEpochMilli(mtime), ZoneId.systemDefault());
// Skip files older than one month
if (fileTime.isBefore(cutoffTime)) {
skippedOldFiles++;
continue;
}
files.add(new RemoteFile(
path + (path.endsWith("/") ? "" : "/") + fileName,
fileName,
@@ -233,8 +246,8 @@ public class SftpService {
// Sort newest first
files.sort((a, b) -> b.modifiedAt().compareTo(a.modifiedAt()));
log.info("Found {} files matching pattern '{}' in path '{}' on server {}",
files.size(), filePattern, path, server.getId());
log.info("Found {} files (skipped {} old files > {} days) matching pattern '{}' in path '{}' on server {}",
files.size(), skippedOldFiles, MAX_FILE_AGE_DAYS, filePattern, path, server.getId());
} catch (Exception e) {
log.error("Failed to list files from server {}: {}", server.getId(), e.getMessage());