
blood-analysis init

Commit 6db74a39ff to master by xueyinfei, 3 months ago

Changed files (lines added):
1. .gitattributes (+2)
2. .gitignore (+33)
3. .mvn/wrapper/maven-wrapper.properties (+19)
4. mvnw (+259)
5. mvnw.cmd (+149)
6. pom.xml (+206)
7. src/main/java/com/guozhi/bloodanalysis/BloodAnalysisApplication.java (+15)
8. src/main/java/com/guozhi/bloodanalysis/config/BusinessAspect.java (+106)
9. src/main/java/com/guozhi/bloodanalysis/config/CorsConfig.java (+36)
10. src/main/java/com/guozhi/bloodanalysis/config/DataSourceConfig.java (+40)
11. src/main/java/com/guozhi/bloodanalysis/config/ExecutorConfig.java (+40)
12. src/main/java/com/guozhi/bloodanalysis/config/GlobalRestExceptionHandler.java (+33)
13. src/main/java/com/guozhi/bloodanalysis/config/MasterDataSourceConfig.java (+159)
14. src/main/java/com/guozhi/bloodanalysis/config/SwaggerConfig.java (+43)
15. src/main/java/com/guozhi/bloodanalysis/controller/BloodAnalysisController.java (+36)
16. src/main/java/com/guozhi/bloodanalysis/entity/DataLineageInfo.java (+17)
17. src/main/java/com/guozhi/bloodanalysis/entity/MetaBloodAnalysis.java (+28)
18. src/main/java/com/guozhi/bloodanalysis/entity/MetaColumn.java (+18)
19. src/main/java/com/guozhi/bloodanalysis/exception/BusinessException.java (+19)
20. src/main/java/com/guozhi/bloodanalysis/mapper/DataLineageInfoMapper.java (+11)
21. src/main/java/com/guozhi/bloodanalysis/mapper/MetaBloodAnalysisMapper.java (+25)
22. src/main/java/com/guozhi/bloodanalysis/parser/SqlParser.java (+302)
23. src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizer.java (+145)
24. src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizerGP.java (+95)
25. src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizerOra.java (+74)
26. src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizerTrd.java (+115)
27. src/main/java/com/guozhi/bloodanalysis/parser/common/AlterParser.java (+62)
28. src/main/java/com/guozhi/bloodanalysis/parser/common/AsTableParser.java (+128)
29. src/main/java/com/guozhi/bloodanalysis/parser/common/BaseParser.java (+202)
30. src/main/java/com/guozhi/bloodanalysis/parser/common/CreateParser.java (+194)
31. src/main/java/com/guozhi/bloodanalysis/parser/common/DeleteParser.java (+41)
32. src/main/java/com/guozhi/bloodanalysis/parser/common/DropParser.java (+50)
33. src/main/java/com/guozhi/bloodanalysis/parser/common/InsertParser.java (+211)
34. src/main/java/com/guozhi/bloodanalysis/parser/common/ParserContext.java (+141)
35. src/main/java/com/guozhi/bloodanalysis/parser/common/SelectParser.java (+294)
36. src/main/java/com/guozhi/bloodanalysis/parser/common/UpdateParser.java (+179)
37. src/main/java/com/guozhi/bloodanalysis/parser/utils/ColumnRefFinder.java (+189)
38. src/main/java/com/guozhi/bloodanalysis/parser/utils/Constants.java (+29)
39. src/main/java/com/guozhi/bloodanalysis/parser/utils/CryptUtils.java (+133)
40. src/main/java/com/guozhi/bloodanalysis/parser/utils/DatabaseType.java (+5)
41. src/main/java/com/guozhi/bloodanalysis/parser/utils/ErrorRecorder.java (+14)
42. src/main/java/com/guozhi/bloodanalysis/parser/utils/ExportParseResultUtil.java (+125)
43. src/main/java/com/guozhi/bloodanalysis/parser/utils/ExprToColumn.java (+421)
44. src/main/java/com/guozhi/bloodanalysis/parser/utils/KColumnProvider.java (+76)
45. src/main/java/com/guozhi/bloodanalysis/parser/utils/KDatabaseProvider.java (+81)
46. src/main/java/com/guozhi/bloodanalysis/parser/utils/PartitionTool.java (+49)
47. src/main/java/com/guozhi/bloodanalysis/parser/utils/SpUtils.java (+123)
48. src/main/java/com/guozhi/bloodanalysis/parser/vo/ClauseType.java (+9)
49. src/main/java/com/guozhi/bloodanalysis/parser/vo/ExcelCell.java (+124)
50. src/main/java/com/guozhi/bloodanalysis/parser/vo/ExcelSheet.java (+72)
51. src/main/java/com/guozhi/bloodanalysis/parser/vo/KColumn.java (+146)
52. src/main/java/com/guozhi/bloodanalysis/parser/vo/VTable.java (+141)
53. src/main/java/com/guozhi/bloodanalysis/service/BloodAnalysisService.java (+110)
54. src/main/java/com/guozhi/bloodanalysis/utils/ApiResult.java (+81)
55. src/main/java/com/guozhi/bloodanalysis/utils/DateUtils.java (+15)
56. src/main/java/com/guozhi/bloodanalysis/utils/EncryptUtils.java (+31)
57. src/main/java/com/guozhi/bloodanalysis/utils/RedisUtils.java (+588)
58. src/main/resources/application.yml (+74)
59. src/main/resources/banner.txt (+25)
60. src/main/resources/log4j2-dev.xml (+101)
61. src/main/resources/mapper/DataLineageInfoMapper.xml (+8)
62. src/main/resources/mapper/MetaBloodAnalysisMapper.xml (+55)
63. src/test/java/com/guozhi/bloodanalysis/BloodAnalysisApplicationTests.java (+13)

.gitattributes (+2)

@@ -0,0 +1,2 @@
/mvnw text eol=lf
*.cmd text eol=crlf

.gitignore (+33)

@@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/

.mvn/wrapper/maven-wrapper.properties (+19)

@@ -0,0 +1,19 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.2
distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip

mvnw (+259)

@@ -0,0 +1,259 @@
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Apache Maven Wrapper startup batch script, version 3.3.2
#
# Optional ENV vars
# -----------------
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
# MVNW_REPOURL - repo url base for downloading maven distribution
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
# ----------------------------------------------------------------------------
set -euf
[ "${MVNW_VERBOSE-}" != debug ] || set -x
# OS specific support.
native_path() { printf %s\\n "$1"; }
case "$(uname)" in
CYGWIN* | MINGW*)
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
native_path() { cygpath --path --windows "$1"; }
;;
esac
# set JAVACMD and JAVACCMD
set_java_home() {
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
if [ -n "${JAVA_HOME-}" ]; then
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
JAVACCMD="$JAVA_HOME/jre/sh/javac"
else
JAVACMD="$JAVA_HOME/bin/java"
JAVACCMD="$JAVA_HOME/bin/javac"
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
return 1
fi
fi
else
JAVACMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v java
)" || :
JAVACCMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v javac
)" || :
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
return 1
fi
fi
}
# hash string like Java String::hashCode
hash_string() {
str="${1:-}" h=0
while [ -n "$str" ]; do
char="${str%"${str#?}"}"
h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
str="${str#?}"
done
printf %x\\n $h
}
verbose() { :; }
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
die() {
printf %s\\n "$1" >&2
exit 1
}
trim() {
# MWRAPPER-139:
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
# Needed for removing poorly interpreted newline sequences when running in more
# exotic environments such as mingw bash on Windows.
printf "%s" "${1}" | tr -d '[:space:]'
}
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do
case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
case "${distributionUrl##*/}" in
maven-mvnd-*bin.*)
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
*)
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
distributionPlatform=linux-amd64
;;
esac
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
distributionUrlName="${distributionUrl##*/}"
distributionUrlNameMain="${distributionUrlName%.*}"
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
exec_maven() {
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
}
if [ -d "$MAVEN_HOME" ]; then
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
exec_maven "$@"
fi
case "${distributionUrl-}" in
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
esac
# prepare tmp dir
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
trap clean HUP INT TERM EXIT
else
die "cannot create temp dir"
fi
mkdir -p -- "${MAVEN_HOME%/*}"
# Download and Install Apache Maven
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
verbose "Downloading from: $distributionUrl"
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
# select .zip or .tar.gz
if ! command -v unzip >/dev/null; then
distributionUrl="${distributionUrl%.zip}.tar.gz"
distributionUrlName="${distributionUrl##*/}"
fi
# verbose opt
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
# normalize http auth
case "${MVNW_PASSWORD:+has-password}" in
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
esac
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
verbose "Found wget ... using wget"
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
verbose "Found curl ... using curl"
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
elif set_java_home; then
verbose "Falling back to use Java to download"
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
cat >"$javaSource" <<-END
public class Downloader extends java.net.Authenticator
{
protected java.net.PasswordAuthentication getPasswordAuthentication()
{
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
}
public static void main( String[] args ) throws Exception
{
setDefault( new Downloader() );
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
}
}
END
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
verbose " - Compiling Downloader.java ..."
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
verbose " - Running Downloader.java ..."
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
fi
# If specified, validate the SHA-256 sum of the Maven distribution zip file
if [ -n "${distributionSha256Sum-}" ]; then
distributionSha256Result=false
if [ "$MVN_CMD" = mvnd.sh ]; then
echo "Checksum validation is not supported for maven-mvnd." >&2
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
elif command -v sha256sum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
elif command -v shasum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
else
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
fi
if [ $distributionSha256Result = false ]; then
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
exit 1
fi
fi
# unzip and move
if command -v unzip >/dev/null; then
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || :
exec_maven "$@"
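One detail worth calling out: the install directory under ~/.m2/wrapper/dists ends in hash_string of distributionUrl, and that shell function deliberately mimics Java's String::hashCode. A minimal sketch (not part of this commit) that reproduces the directory name for the distributionUrl pinned in maven-wrapper.properties:

public class WrapperHash {
    public static void main(String[] args) {
        String distributionUrl =
            "https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip";
        // hash_string() accumulates (h * 31 + char) mod 2^32 and prints hex,
        // which is exactly the unsigned hex rendering of String.hashCode()
        System.out.println(Integer.toHexString(distributionUrl.hashCode()));
    }
}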

mvnw.cmd (+149)

@@ -0,0 +1,149 @@
<# : batch portion
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM http://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Apache Maven Wrapper startup batch script, version 3.3.2
@REM
@REM Optional ENV vars
@REM MVNW_REPOURL - repo url base for downloading maven distribution
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output
@REM ----------------------------------------------------------------------------
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0)
@SET __MVNW_CMD__=
@SET __MVNW_ERROR__=
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath%
@SET PSModulePath=
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @(
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B)
)
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE%
@SET __MVNW_PSMODULEP_SAVE=
@SET __MVNW_ARG0_NAME__=
@SET MVNW_USERNAME=
@SET MVNW_PASSWORD=
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*)
@echo Cannot start maven from wrapper >&2 && exit /b 1
@GOTO :EOF
: end batch / begin powershell #>
$ErrorActionPreference = "Stop"
if ($env:MVNW_VERBOSE -eq "true") {
$VerbosePreference = "Continue"
}
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl
if (!$distributionUrl) {
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
}
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
"maven-mvnd-*" {
$USE_MVND = $true
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip"
$MVN_CMD = "mvnd.cmd"
break
}
default {
$USE_MVND = $false
$MVN_CMD = $script -replace '^mvnw','mvn'
break
}
}
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
if ($env:MVNW_REPOURL) {
$MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" }
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')"
}
$distributionUrlName = $distributionUrl -replace '^.*/',''
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
if ($env:MAVEN_USER_HOME) {
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain"
}
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME"
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
exit $?
}
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) {
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl"
}
# prepare tmp dir
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir"
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null
trap {
if ($TMP_DOWNLOAD_DIR.Exists) {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
}
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null
# Download and Install Apache Maven
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
Write-Verbose "Downloading from: $distributionUrl"
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
$webclient = New-Object System.Net.WebClient
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) {
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD)
}
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null
# If specified, validate the SHA-256 sum of the Maven distribution zip file
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum
if ($distributionSha256Sum) {
if ($USE_MVND) {
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties."
}
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) {
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property."
}
}
# unzip and move
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
try {
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
} catch {
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
Write-Error "fail to move MAVEN_HOME"
}
} finally {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"

pom.xml (+206)

@@ -0,0 +1,206 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.7.8</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.guozhi</groupId>
<artifactId>blood-analysis</artifactId>
<version>0.0.1</version>
<name>blood-analysis</name>
<description>blood-analysis</description>
<url/>
<licenses>
<license/>
</licenses>
<developers>
<developer/>
</developers>
<scm>
<connection/>
<developerConnection/>
<tag/>
<url/>
</scm>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>2.9.2</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>2.9.2</version>
</dependency>
<!-- MyBatis -->
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
</exclusion>
</exclusions>
<version>3.0.0</version>
</dependency>
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper-spring-boot-starter</artifactId>
<version>2.1.0</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.25</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.20</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter-security</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>jakarta.validation</groupId>
<artifactId>jakarta.validation-api</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.nats</groupId>
<artifactId>jnats</artifactId>
<version>2.6.5</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>2.0.21</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.12.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.17.0</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.17.0</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.17.0</version>
</dependency>
<dependency>
<groupId>com.guozhi</groupId>
<artifactId>sqlparser</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>4.1.2</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>4.1.2</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<mainClass>com.guozhi.bloodanalysis.BloodAnalysisApplication</mainClass>
<!-- <skip>true</skip> -->
<excludes>
<exclude>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>

src/main/java/com/guozhi/bloodanalysis/BloodAnalysisApplication.java (+15)

@@ -0,0 +1,15 @@
package com.guozhi.bloodanalysis;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableAsync;
@SpringBootApplication
@EnableAsync
public class BloodAnalysisApplication {
public static void main(String[] args) {
SpringApplication.run(BloodAnalysisApplication.class, args);
}
}

src/main/java/com/guozhi/bloodanalysis/config/BusinessAspect.java (+106)

@@ -0,0 +1,106 @@
//package com.guozhi.bloodanalysis.config;
//
//import com.guozhi.bloodanalysis.utils.RedisUtils;
//import lombok.extern.slf4j.Slf4j;
//import org.aspectj.lang.JoinPoint;
//import org.aspectj.lang.ProceedingJoinPoint;
//import org.aspectj.lang.annotation.AfterReturning;
//import org.aspectj.lang.annotation.Around;
//import org.aspectj.lang.annotation.Aspect;
//import org.aspectj.lang.annotation.Pointcut;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.stereotype.Component;
//
//import javax.servlet.http.HttpServletRequest;
//
///**
// * Aspect layer: use @Pointcut to define pointcuts for selected methods (usually
// * controller endpoints) and @Around to run the matching logic. Two examples are given:
// * signFeild: front-end/back-end signature verification, to guarantee the caller is
// * legitimate and the data is consistent.
// * oauthFeild: user-info resolution; read it from the gateway headers first, and if absent
// * call the OAuth service with the refresh_token from the request parameters or headers,
// * checking endpoint permissions at the same time.
// */
//@Slf4j
//@Component
//@Aspect
//public class BusinessAspect {
//
// @Autowired
// HttpServletRequest request;
//
// @Autowired
// private RedisUtils redisUtils;
//
//// @Autowired
//// UserRoleMapper userRoleMapper;
////
//// @Autowired
//// PermissionMapper permissionMapper;
////
//// @Autowired
//// MenuFunctionMapper menuFunctionMapper;
//
//
//// @Pointcut("execution(* com.guozhi.bloodanalysis.controller.SystemController.*(..))")
//// public void ApiController() {}
//
//
//// @Pointcut("within(@org.springframework.web.bind.annotation.RestController *) &&"+
//// "!ApiController()")
//// public void OauthAround() {}
//
//// @Around("OauthAround()")
//// public Object OauthAround(ProceedingJoinPoint point) throws Throwable {
//// log.info("--------ApiController PointCut Start--------");
//// Object[] objects = point.getArgs();
//// String token = request.getHeader("token");
//// if (StringUtils.isEmpty(token)){
//// throw new BusinessException("tokenError","Login session has expired, please sign in again");
//// }
//// User user = (User)redisUtils.get(token);
//// if (user == null || StringUtils.isEmpty(user.getUserName())){
//// throw new BusinessException("tokenError","Login session has expired, please sign in again");
//// }
//// redisUtils.expire(token,1800);
//// if (!user.isAdmin()){
//// // TODO: permission check
//// List<UserRole> userRoleList = userRoleMapper.getUserRoleByUserName(user.getUserName());
//// if (CollectionUtils.isEmpty(userRoleList)){
//// throw new BusinessException("hasNoAuth","No permission for this function; contact the administrator");
//// }
//// List<String> roleCodes = userRoleList.stream().map(UserRole::getRoleCode).collect(Collectors.toList());
//// List<Permission> permissions = permissionMapper.getFunctionPermissionByRoleCodeList(roleCodes);
//// if (CollectionUtils.isEmpty(permissions)){
//// throw new BusinessException("hasNoAuth","No permission for this function; contact the administrator");
//// }
//// boolean hasPermission = false;
//// List<String> functionList = permissions.stream().map(Permission::getPermissionCode).collect(Collectors.toList());
//// List<MenuFunction> menuFunctionList = menuFunctionMapper.getMenuFunctionByCodeList(functionList);
//// if (CollectionUtils.isEmpty(menuFunctionList)){
//// throw new BusinessException("hasNoAuth","No permission for this function; contact the administrator");
//// }
//// for (MenuFunction menuFunction : menuFunctionList) {
//// if (new AntPathRequestMatcher(menuFunction.getUrl()).matches(request)){
//// hasPermission = true;
//// break;
//// }
//// }
//// if (hasPermission){
//// return point.proceed(objects);
//// }else{
//// throw new BusinessException("hasNoAuth","No permission for this function; contact the administrator");
//// }
//// }
//// return point.proceed(objects);
//// }
//
// @AfterReturning(returning = "rvt", pointcut = "(within(@org.springframework.web.bind.annotation.RestController *)"+
// " || " +
// "within(@org.springframework.stereotype.Controller *)) && " +
// "execution(* *(..))")
// public Object afterExec(JoinPoint joinPoint, Object rvt) {
//// log.info("--------AfterReturningResult:"+ JSON.toJSONString(rvt));
// return rvt;
// }
//
//}

src/main/java/com/guozhi/bloodanalysis/config/CorsConfig.java (+36)

@@ -0,0 +1,36 @@
package com.guozhi.bloodanalysis.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.UrlBasedCorsConfigurationSource;
import org.springframework.web.filter.CorsFilter;
@Configuration // do not omit this annotation
//public class CorsConfig implements WebMvcConfigurer {
public class CorsConfig{
private CorsConfiguration buildConfig() {
CorsConfiguration corsConfiguration = new CorsConfiguration();
corsConfiguration.addAllowedOrigin("*"); // 1. allow any origin
corsConfiguration.addAllowedHeader("*"); // 2. allow any header
corsConfiguration.addAllowedMethod("*"); // 3. allow any method (POST, GET, ...)
corsConfiguration.setAllowCredentials(false);// do not send cookies cross-origin
return corsConfiguration;
}
@Bean
public CorsFilter corsFilter() {
UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
source.registerCorsConfiguration("/**", buildConfig()); // 4
return new CorsFilter(source);
}
// @Override
// public void addCorsMappings(CorsRegistry registry) {
// registry.addMapping("/**") // all endpoints
// .allowCredentials(true) // whether to send cookies
// .allowedOriginPatterns("*") // allowed origins
// .allowedMethods("GET", "POST", "PUT", "DELETE") // allowed methods
// .allowedHeaders("*")
// .exposedHeaders("*");
// }
}

src/main/java/com/guozhi/bloodanalysis/config/DataSourceConfig.java (+40)

@@ -0,0 +1,40 @@
package com.guozhi.bloodanalysis.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.support.http.StatViewServlet;
import com.alibaba.druid.support.http.WebStatFilter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.sql.DataSource;
@Configuration
public class DataSourceConfig {
@Bean
public ServletRegistrationBean druidServlet() {// configures the Druid web monitoring console
ServletRegistrationBean servletRegistrationBean = new ServletRegistrationBean(new StatViewServlet(), "/druid/*");// registers the Druid stat view servlet
// servletRegistrationBean.addInitParameter("allow", "127.0.0.1,129.168.1.11");// allow list
// servletRegistrationBean.addInitParameter("deny", "129.168.1.12");// deny list
servletRegistrationBean.addInitParameter("loginUsername", "admin");// console username
servletRegistrationBean.addInitParameter("loginPassword", "123456");// console password
servletRegistrationBean.addInitParameter("resetEnable", "false");// whether pool stats can be reset
return servletRegistrationBean;
}
@Bean // monitoring filter
public FilterRegistrationBean filterRegistrationBean(){
FilterRegistrationBean filterRegistrationBean=new FilterRegistrationBean();
filterRegistrationBean.setFilter(new WebStatFilter());
filterRegistrationBean.addUrlPatterns("/*");// monitor all requests
filterRegistrationBean.addInitParameter("exclusions", "*.js,*.gif,*.jpg,*.css,/druid/*");// excluded patterns
return filterRegistrationBean;
}
@Bean
@ConfigurationProperties(prefix = "spring.datasource.druid")
public DataSource druidDataSource() {
return new DruidDataSource();
}
}

src/main/java/com/guozhi/bloodanalysis/config/ExecutorConfig.java (+40)

@@ -0,0 +1,40 @@
package com.guozhi.bloodanalysis.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.AsyncConfigurer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.Executor;
@Configuration
public class ExecutorConfig implements AsyncConfigurer {
// How ThreadPoolTaskExecutor processes tasks:
// while the pool is smaller than corePoolSize, a new thread is created for each task;
// once the pool reaches corePoolSize, tasks go into the workQueue and idle pool threads pull from it;
// when the workQueue is full, new threads are created up to maximumPoolSize;
// if the pool has hit maximumPoolSize, the RejectedExecutionHandler rejects further tasks;
// threads beyond corePoolSize that stay idle for keepAliveTime are destroyed.
// getAsyncExecutor: custom thread pool; without this override the default pool is used.
@Override
@Bean
public Executor getAsyncExecutor() {
ThreadPoolTaskExecutor threadPool = new ThreadPoolTaskExecutor();
// core pool size
threadPool.setCorePoolSize(5);
// max pool size
threadPool.setMaxPoolSize(10);
// capacity of the pool's buffer queue
threadPool.setQueueCapacity(10);
// on shutdown, wait for queued tasks to complete
threadPool.setWaitForTasksToCompleteOnShutdown(true);
// maximum wait (default 0 = stop immediately); force shutdown after 60s
threadPool.setAwaitTerminationSeconds(60);
// thread name prefix
threadPool.setThreadNamePrefix("ThreadPoolTaskExecutor-");
// initialize the pool
threadPool.initialize();
return threadPool;
}
}
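Since BloodAnalysisApplication is annotated with @EnableAsync, any bean method marked @Async is dispatched to the pool configured above. A minimal usage sketch (hypothetical service, not part of this commit):

import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

@Service
public class DemoAsyncService {
    @Async // runs on a "ThreadPoolTaskExecutor-" thread supplied by getAsyncExecutor()
    public void runInBackground() {
        System.out.println(Thread.currentThread().getName());
    }
}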

src/main/java/com/guozhi/bloodanalysis/config/GlobalRestExceptionHandler.java (+33)

@@ -0,0 +1,33 @@
package com.guozhi.bloodanalysis.config;
import com.guozhi.bloodanalysis.exception.BusinessException;
import com.guozhi.bloodanalysis.utils.ApiResult;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
/**
* Global exception handler.
*/
@ControllerAdvice(annotations = {RestController.class , Controller.class})
@ResponseBody
@Slf4j
public class GlobalRestExceptionHandler {
@ExceptionHandler(value = BusinessException.class)
public ApiResult businessExceptionHandler(BusinessException e) {
log.error("{} {}", e.getCode(), e.getMessage());
return ApiResult.error(999, e.getMessage());
}
@ExceptionHandler(value = Exception.class)
public ApiResult exceptionHandler(Exception e) {
log.error("unhandled exception", e);
return ApiResult.error(999, e.getMessage());
}
}
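Any BusinessException escaping a @RestController or @Controller method is turned into an ApiResult with code 999 and the exception message as the body. An illustrative (hypothetical) endpoint:

import com.guozhi.bloodanalysis.exception.BusinessException;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class DemoController {
    @GetMapping("/demo")
    public String demo() {
        // caught by GlobalRestExceptionHandler and returned as ApiResult.error(999, "demo failure")
        throw new BusinessException("demoError", "demo failure");
    }
}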

src/main/java/com/guozhi/bloodanalysis/config/MasterDataSourceConfig.java (+159)

@@ -0,0 +1,159 @@
package com.guozhi.bloodanalysis.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.github.pagehelper.PageInterceptor;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.Properties;
@Configuration
@MapperScan(basePackages = MasterDataSourceConfig.PACKAGE, sqlSessionFactoryRef = "masterSqlSessionFactory")
public class MasterDataSourceConfig {
/**
* Multi-datasource wiring: the key is here, binding this mapper package to its own data source.
*/
static final String PACKAGE = "com.guozhi.bloodanalysis.mapper";
static final String MAPPER_LOCATION = "classpath:mapper/*.xml";
/**
* Database connection settings; ideally these would come from a configuration center.
*/
@Value("${master.datasource.url}")
private String url;
@Value("${master.datasource.username}")
private String username;
@Value("${master.datasource.password}")
private String password;
@Value("${master.datasource.driverClassName}")
private String driverClassName;
/**
* The pool settings below could be hard-coded, but with multiple data sources it is better to read them from configuration.
*/
@Value("${spring.datasource.initialSize}")
private int initialSize;
@Value("${spring.datasource.minIdle}")
private int minIdle;
@Value("${spring.datasource.maxActive}")
private int maxActive;
@Value("${spring.datasource.maxWait}")
private int maxWait;
@Value("${spring.datasource.timeBetweenEvictionRunsMillis}")
private int timeBetweenEvictionRunsMillis;
@Value("${spring.datasource.minEvictableIdleTimeMillis}")
private int minEvictableIdleTimeMillis;
@Value("${spring.datasource.validationQuery}")
private String validationQuery;
@Value("${spring.datasource.testWhileIdle}")
private boolean testWhileIdle;
@Value("${spring.datasource.testOnBorrow}")
private boolean testOnBorrow;
@Value("${spring.datasource.testOnReturn}")
private boolean testOnReturn;
@Value("${spring.datasource.poolPreparedStatements}")
private boolean poolPreparedStatements;
@Value("${spring.datasource.maxPoolPreparedStatementPerConnectionSize}")
private int maxPoolPreparedStatementPerConnectionSize;
@Value("${spring.datasource.filters}")
private String filters;
@Value("${spring.datasource.connectionProperties}")
private String connectionProperties;
@Bean(name = "masterDataSource")
@Primary // when several beans of the same type are candidates, this one is preferred
public DataSource masterDataSource() {
DruidDataSource dataSource = new DruidDataSource();
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
dataSource.setDriverClassName(driverClassName);
// pool settings
dataSource.setInitialSize(initialSize);
dataSource.setMinIdle(minIdle);
dataSource.setMaxActive(maxActive);
dataSource.setMaxWait(maxWait);
dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
dataSource.setValidationQuery(validationQuery);
dataSource.setTestWhileIdle(testWhileIdle);
dataSource.setTestOnBorrow(testOnBorrow);
dataSource.setTestOnReturn(testOnReturn);
dataSource.setPoolPreparedStatements(poolPreparedStatements);
dataSource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
/**
* Configures Druid SQL monitoring (very useful): with two data sources, whichever one gets this filter has its SQL monitored; configure both to monitor both.
*/
try {
dataSource.setFilters(filters);
} catch (SQLException e) {
e.printStackTrace();
}
dataSource.setConnectionProperties(connectionProperties);
return dataSource;
}
@Bean(name = "masterTransactionManager")
@Primary
public DataSourceTransactionManager masterTransactionManager() {
return new DataSourceTransactionManager(masterDataSource());
}
@Bean(name = "masterSqlSessionFactory")
@Primary
public SqlSessionFactory masterSqlSessionFactory(@Qualifier("masterDataSource") DataSource masterDataSource)
throws Exception {
final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
sessionFactory.setDataSource(masterDataSource);
sessionFactory.setMapperLocations(new PathMatchingResourcePatternResolver().getResources(MasterDataSourceConfig.MAPPER_LOCATION));
// pagination plugin (PageHelper)
Interceptor interceptor = new PageInterceptor();
Properties properties = new Properties();
// database dialect
properties.setProperty("helperDialect", "mysql");
// properties.setProperty("dialect", "mysql");
// treat the offset parameter as pageNum
properties.setProperty("offsetAsPageNum", "true");
// run a count query when paging with RowBounds
properties.setProperty("rowBoundsWithCount", "true");
// "reasonable" paging disabled: out-of-range page numbers are not clamped
properties.setProperty("reasonable", "false");
interceptor.setProperties(properties);
sessionFactory.setPlugins(new Interceptor[] {interceptor});
return sessionFactory.getObject();
}
}
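The PageInterceptor registered above enables PageHelper-style paging for the mappers in this package. A sketch of how a service could use it (hypothetical service class; the mapper and entity are from this commit):

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.guozhi.bloodanalysis.entity.DataLineageInfo;
import com.guozhi.bloodanalysis.mapper.DataLineageInfoMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;

@Service
public class LineagePageService {
    @Autowired
    DataLineageInfoMapper dataLineageInfoMapper;

    public PageInfo<DataLineageInfo> page(int pageNum, int pageSize) {
        PageHelper.startPage(pageNum, pageSize); // applies to the next query only
        List<DataLineageInfo> rows = dataLineageInfoMapper.search();
        return new PageInfo<>(rows); // rows plus count/total-page metadata
    }
}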

src/main/java/com/guozhi/bloodanalysis/config/SwaggerConfig.java (+43)

@@ -0,0 +1,43 @@
package com.guozhi.bloodanalysis.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.Contact;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@Configuration
@EnableSwagger2 // enable Swagger2 auto-configuration
public class SwaggerConfig extends WebMvcConfigurationSupport {
/**
* Works around newer Spring Boot versions not serving http://localhost:8001/swagger-ui.html
* @param registry the resource handler registry
*/
@Override
protected void addResourceHandlers(ResourceHandlerRegistry registry) {
// make static resources reachable
registry.addResourceHandler("/**").addResourceLocations("classpath:/static/");
// make the swagger UI page reachable
registry.addResourceHandler("/swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/");
// make swagger's webjar JS files reachable
registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
}
// API documentation info
private ApiInfo apiInfo() {
return new ApiInfoBuilder()
.title("API documentation") // title
.description("Lists each controller, its endpoints and parameters") // description
.version("v1.0") // version
.contact(new Contact("薛寅飞", "http://192.168.8.138:8082/dispatch/swagger-ui.html", "qc_xueyinfei@xcmg.com"))
.build();
}
@Bean
public Docket docket() {
return new Docket(DocumentationType.SWAGGER_2).apiInfo(apiInfo());
}
}
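With @EnableSwagger2 and the default Docket, springfox picks up every controller automatically; the standard io.swagger annotations add descriptions to the generated page. An illustrative (hypothetical) controller:

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@Api(tags = "demo")
@RestController
public class DemoDocsController {
    @ApiOperation("Shown as this endpoint's description in swagger-ui.html")
    @GetMapping("/ping")
    public String ping() {
        return "pong";
    }
}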

src/main/java/com/guozhi/bloodanalysis/controller/BloodAnalysisController.java (+36)

@@ -0,0 +1,36 @@
package com.guozhi.bloodanalysis.controller;
import com.guozhi.bloodanalysis.service.BloodAnalysisService;
import com.guozhi.bloodanalysis.utils.ApiResult;
import com.guozhi.bloodanalysis.utils.RedisUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
@RestController
public class BloodAnalysisController {
@Autowired
BloodAnalysisService bloodAnalysisService;
@Autowired
RedisUtils redisUtils;
@PostMapping("/bloodAnalysis")
public ApiResult<String> bloodAnalysis(HttpServletRequest request) {
String dashUserName = request.getHeader("dashUserName");
String dashPassword = request.getHeader("dashPassword");
Boolean startBloodAnalysis = (Boolean)redisUtils.get("startBloodAnalysis");
if (startBloodAnalysis != null && startBloodAnalysis){
return ApiResult.success("A lineage analysis job is already running; no need to start another");
}else {
bloodAnalysisService.analysis(dashUserName,dashPassword);
return ApiResult.success("Job started successfully; check back for results shortly");
}
}
}
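The controller uses the Redis key startBloodAnalysis as a busy flag so the job is not started twice. A hedged sketch of the service side (RedisUtils is part of this commit but not shown in this section, so the set(...) calls below are assumed and left commented):

import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

@Service
public class BloodAnalysisServiceSketch {
    @Async // lets the controller return immediately while parsing continues
    public void analysis(String dashUserName, String dashPassword) {
        // redisUtils.set("startBloodAnalysis", true);   // hypothetical: mark the job as running
        try {
            // ... load procedure texts, parse lineage, export results ...
        } finally {
            // redisUtils.set("startBloodAnalysis", false);  // hypothetical: clear the busy flag
        }
    }
}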

src/main/java/com/guozhi/bloodanalysis/entity/DataLineageInfo.java (+17)

@@ -0,0 +1,17 @@
package com.guozhi.bloodanalysis.entity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DataLineageInfo {
private Integer onum;
private String ssysCd;
private String mdlName;
private String procName;
private String procLine;
private String procText;
}

src/main/java/com/guozhi/bloodanalysis/entity/MetaBloodAnalysis.java (+28)

@@ -0,0 +1,28 @@
package com.guozhi.bloodanalysis.entity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class MetaBloodAnalysis {
private String id;
private Integer proId;
private String proName;
private String targetSysCd;
private String targetMdlName;
private String targetTableName;
private String targetTableCnName;
private String targetColName;
private String targetColCnName;
private String targetColType;
private String sourceSysCd;
private String sourceMdlName;
private String sourceTableName;
private String sourceTableCnName;
private String sourceColName;
private String sourceColCnName;
private String sourceColType;
}

src/main/java/com/guozhi/bloodanalysis/entity/MetaColumn.java (+18)

@@ -0,0 +1,18 @@
package com.guozhi.bloodanalysis.entity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class MetaColumn {
private int onum;
private String ssysCd;
private String mdlName;
private String tableEngName;
private String tableCnName;
private String fldEngName;
private String fldCnName;
}

src/main/java/com/guozhi/bloodanalysis/exception/BusinessException.java (+19)

@@ -0,0 +1,19 @@
package com.guozhi.bloodanalysis.exception;
import lombok.Data;
import lombok.EqualsAndHashCode;
@EqualsAndHashCode(callSuper = false)
@Data
public class BusinessException extends RuntimeException{
private String code;
private String message;
public BusinessException(String message) {
super(message);
this.message = message; // Lombok @Data overrides getMessage() with this field, so keep it set
}
public BusinessException(String code, String message) {
super(message);
this.code = code;
this.message = message;
}
}

src/main/java/com/guozhi/bloodanalysis/mapper/DataLineageInfoMapper.java (+11)

@@ -0,0 +1,11 @@
package com.guozhi.bloodanalysis.mapper;
import com.guozhi.bloodanalysis.entity.DataLineageInfo;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
@Mapper
public interface DataLineageInfoMapper {
List<DataLineageInfo> search();
}

src/main/java/com/guozhi/bloodanalysis/mapper/MetaBloodAnalysisMapper.java (+25)

@@ -0,0 +1,25 @@
package com.guozhi.bloodanalysis.mapper;
import com.guozhi.bloodanalysis.entity.MetaBloodAnalysis;
import com.guozhi.bloodanalysis.entity.MetaColumn;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;
@Mapper
public interface MetaBloodAnalysisMapper {
void insert(@Param("blood") MetaBloodAnalysis metaBloodAnalysis);
MetaColumn isColExis(@Param("db") String db, @Param("schema") String schema, @Param("tableCode") String tableCode, @Param("columnName") String columnName);
List<Map<String, String>> getColumnsByTabId(@Param("tableId") String tabId,@Param("colCode") String colCode);
List<Map<String, Object>> getSystem(@Param("schema") String schema, @Param("tableCode") String tableCode);
List<Map<String, String>> getColumnsByTable(@Param("tableName") String tableName, @Param("ssysCd") String defaultDb, @Param("mdlName") String defaultSchema);
void deleteAllBloodData();
}

src/main/java/com/guozhi/bloodanalysis/parser/SqlParser.java (+302)

@@ -0,0 +1,302 @@
package com.guozhi.bloodanalysis.parser;
import com.guozhi.bloodanalysis.entity.DataLineageInfo;
import com.guozhi.bloodanalysis.exception.BusinessException;
import com.guozhi.bloodanalysis.parser.clean.GenericLogNormalizer;
import com.guozhi.bloodanalysis.parser.common.*;
import com.guozhi.bloodanalysis.parser.utils.ExportParseResultUtil;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import gudusoft.gsqlparser.EDbVendor;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.TGSqlParser;
import gudusoft.gsqlparser.TStatementList;
import gudusoft.gsqlparser.stmt.TAlterTableStatement;
import gudusoft.gsqlparser.stmt.TCreateTableSqlStatement;
import gudusoft.gsqlparser.stmt.TDeleteSqlStatement;
import gudusoft.gsqlparser.stmt.TInsertSqlStatement;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
import gudusoft.gsqlparser.stmt.TUpdateSqlStatement;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.guozhi.bloodanalysis.parser.utils.DatabaseType.Oracle;
@Slf4j
@Component
@Data
@NoArgsConstructor
@Scope("prototype")
public class SqlParser {
// parser instance
private TGSqlParser sqlParser = null;
// SQL parser context
private ParserContext parserContext = new ParserContext();
@Autowired
ExportParseResultUtil exportParseResultUtil;
@Autowired
ApplicationContext applicationContext;
public void parse(DataLineageInfo dataLineageInfo, List<Map<String, String>> databaseList) throws Exception {
String sqlText = "";
try {
List<String> targetSqlList = new ArrayList<>();
String dbType = "MYSQL";
if (databaseList !=null && databaseList.size()>0){
for (Map<String, String> map : databaseList) {
String type = map.get(dataLineageInfo.getSsysCd());
if (type != null) {
dbType = type;
}
}
}
String sql = new GenericLogNormalizer().normalizer(dataLineageInfo.getProcText(),dbType);
sql = optDeclare(sql,dataLineageInfo.getProcName());
sql = optDeclare2(sql);
if(sql.trim().equals("")){
throw new BusinessException("errorSQLparse:"+dataLineageInfo.getProcName());
}
targetSqlList.add(sql);
String defaultSchema = dataLineageInfo.getMdlName();
String defaultSystem = dataLineageInfo.getSsysCd();
this.parserContext.setDefaultDb(defaultSystem);
this.parserContext.setDefaultSchema(defaultSchema);
int length = targetSqlList.size();
for (String s : targetSqlList) {
sqlText = s;
if ("ORACLE".equals(dbType)) {
if (Objects.equals(dbType, Oracle.toString())) {
if (sqlText != null) {
sqlText = sqlText.toUpperCase();
}
}
sqlParser = new TGSqlParser(EDbVendor.dbvoracle);
}
if ("MYSQL".equals(dbType)) {
sqlParser = new TGSqlParser(EDbVendor.dbvmysql);
}
if ("SQLSERVER".equals(dbType)) {
sqlParser = new TGSqlParser(EDbVendor.dbvmssql);
}
if ("TERADATA".equals(dbType)) {
sqlParser = new TGSqlParser(EDbVendor.dbvteradata);
}
if ("POSTGRESQL".equals(dbType)) {
sqlParser = new TGSqlParser(EDbVendor.dbvpostgresql);
}
if ("DB2".equals(dbType)) {
sqlParser = new TGSqlParser(EDbVendor.dbvdb2);
}
sqlParser.sqltext = sqlText;
int ret = sqlParser.parse();
List<KColumn> result = new ArrayList<>();
if (ret == 0) {
TStatementList statementList = sqlParser.getSqlstatements();
while (statementList.hasNext()) {
TCustomSqlStatement stmt = statementList.next();
parserContext.getTableInCurrentStatement().clear();
switch (stmt.sqlstatementtype) {
case sstselect:
result = parseSelect(stmt);
break;
case sstdelete:
result = parseDelete(stmt);
break;
case sstupdate:
result = parseUpdate(stmt);
break;
case sstinsert:
result = parseInsert(stmt);
break;
case sstcreatetable:
result = parseCreateTable(stmt);
break;
case sstcreateview:
break;
case sstoraclealtertablespace:
// parseAlterTable(stmt);
break;
case sstdroptable:
// parseDropTable(stmt);
break;
case sstmerge:
log.error("sstmerge["
+ stmt.sqlstatementtype
+ "][unsupported sql type]sqltext:"
);
break; // prevent fall-through into default
default:
log.error("[" + stmt.sqlstatementtype
+ "][unknown sql type]sqltext:"
);
break;
}
}
} else {
log.error(sqlParser.sqltext + sqlParser.getErrormessage());
}
if (result.size() > 0) {
exportParseResultUtil.expResult(result, dataLineageInfo);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
private String optDeclare(String sql,String procName) {
String returnSql=sql;
if(StringUtils.isNotEmpty(sql)){
// stored procedure name
String[] sqls = sql.split(";");
for(String sql2:sqls){
sql2 = sql2.trim();
// drop SET, TRUNCATE and DELETE statements, plus calls to other stored procedures
if(sql2.startsWith("SET")||sql2.startsWith("TRUNCATE")||sql2.startsWith("DELETE")||sql2.startsWith("PERFORM")){
sql = sql.replace(sql2+";", "");
}
// String dd = sql2.replaceAll(" ", "");
// int startIndex = dd.toUpperCase().lastIndexOf("PROCEDURE");
// if(startIndex >= 0){
// startIndex += "PROCEDURE".length();
// int endIndex = dd.indexOf("(");
// if(endIndex >= 0){
// procName = dd.substring(startIndex, endIndex);
// }
// }
/*
Pattern pat = Pattern.compile("(DECLARE)(\\s*)");
Matcher mat = pat.matcher(sql2);
if (mat.find()) {
sql2 = mat.replaceAll("");
String sub = sql2.substring(0,sql2.indexOf(" ")).trim();
if(sql.contains(sub)){
sql = sql.replaceAll(sub, "'88888888'");
}
}*/
}
Pattern pat = Pattern.compile("(DECLARE)(\\s).*");
Matcher mat = pat.matcher(sql);
while (mat.find()) {
sql = mat.replaceAll("");
}
// locate where the script ends
Pattern lastPattern = Pattern.compile("(END)(\\s)+"+("".equals(procName)?";":procName));
Matcher lastMatcher = lastPattern.matcher(sql);
if(lastMatcher.find()){
int lastIndex = lastMatcher.start();
if(sql.contains("BEGIN")){
returnSql = sql.substring(sql.indexOf("BEGIN")+5, lastIndex);
}else{
returnSql = sql.substring(0, lastIndex);
}
}else{
Pattern tempPattern = Pattern.compile("(END)(\\s)*;");
Matcher tempMatcher = tempPattern.matcher(sql);
Pattern tempPatternBegin = Pattern.compile("(\\s)(BEGIN)(\\s)");
Matcher tempMatcherBegin = tempPatternBegin.matcher(sql);
if(tempMatcher.find()){
int tempIndex = tempMatcher.start();
int tempIndexBegin = 0;
if(tempMatcherBegin.find()){
tempIndexBegin = tempMatcherBegin.start();
}
//skip the matched " BEGIN" (6 characters) so only the body remains
returnSql = sql.substring(tempIndexBegin+6, tempIndex);
}
}
}
return returnSql;
}
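// Illustrative sketch (hypothetical multi-line script), not from the original source:
// optDeclare removes DECLARE lines, drops SET/TRUNCATE/DELETE/PERFORM statements,
// and returns only the text between BEGIN and the closing "END <procName>" (or "END;").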
private String optDeclare2(String sql) {
StringBuilder returnVal = new StringBuilder();
if(StringUtils.isNotEmpty(sql)){
String[] sqls = sql.split(";");
for(String sql2:sqls){
sql2 = sql2.trim();
//keep only INSERT / UPDATE / CREATE statements; everything else is dropped
if(sql2.toUpperCase().trim().startsWith("INSERT")){
returnVal.append(sql2.substring(sql2.toUpperCase().indexOf("INSERT"))).append(";\r\n");
}else if(sql2.toUpperCase().trim().startsWith("UPDATE")){
returnVal.append(sql2.substring(sql2.toUpperCase().indexOf("UPDATE"))).append(";\r\n");
}else if(sql2.toUpperCase().trim().startsWith("MERGE")){
//TODO wxl MERGE parsing is not supported yet and causes errors, so MERGE statements are skipped
// returnVal.append(sql2.substring(sql2.toUpperCase().indexOf("MERGE"))+";\r\n");
}else if(sql2.toUpperCase().trim().startsWith("CREATE")){
returnVal.append(sql2.substring(sql2.toUpperCase().indexOf("CREATE"))).append(";\r\n");
}
}
}
return returnVal.toString();
}
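// Illustrative example (hypothetical input), assuming statements split cleanly on ';':
//   in : "SET V=1; INSERT INTO T SELECT * FROM S; DELETE FROM X;"
//   out: "INSERT INTO T SELECT * FROM S;\r\n" -- only INSERT/UPDATE/CREATE survive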
private List<KColumn> parseAlterTable(TCustomSqlStatement stmt) {
TAlterTableStatement alterTableStatement=(TAlterTableStatement)stmt;
AlterParser parser=new AlterParser(alterTableStatement,parserContext);
parser.parse();
return parser.getParseResult();
}
private List<KColumn> parseUpdate(TCustomSqlStatement stmt) {
TUpdateSqlStatement dStmt=(TUpdateSqlStatement)stmt;
UpdateParser parser=new UpdateParser(dStmt,parserContext);
parser.parse();
return parser.getParseResult();
}
private List<KColumn> parseDelete(TCustomSqlStatement stmt) {
TDeleteSqlStatement dStmt=(TDeleteSqlStatement)stmt;
DeleteParser parser=new DeleteParser(dStmt,parserContext);
parser.parse();
return parser.getParseResult();
}
private List<KColumn> parseInsert(TCustomSqlStatement stmt) {
InsertParser parser = applicationContext.getBean(InsertParser.class);
parser.setMInsertSql((TInsertSqlStatement)stmt);
parser.setMParserContext(this.parserContext);
parser.parse();
return parser.getParseResult();
}
// private void parseDropTable(TCustomSqlStatement stmt) {
// DropParser dp=new DropParser(stmt,parserContext);
// dp.parse();
// }
public List<KColumn> parseCreateTable(TCustomSqlStatement stmt) {
TCreateTableSqlStatement ctSql=(TCreateTableSqlStatement)stmt;
CreateParser parser=new CreateParser(ctSql,parserContext);
parser.parse();
return parser.getParseResult();
}
public List<KColumn> parseSelect(TCustomSqlStatement stmt) {
TSelectSqlStatement selectSqlStatement = (TSelectSqlStatement) stmt;
SelectParser parser=new SelectParser(selectSqlStatement,parserContext);
parser.parse();
return parser.getParseResult();
}
}

145
src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizer.java

@ -0,0 +1,145 @@
package com.guozhi.bloodanalysis.parser.clean;
import lombok.extern.slf4j.Slf4j;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Slf4j
public class GenericLogNormalizer{
public String normalizer(String logstr,String dbType) {
String retVal=logstr.toUpperCase();
if(retVal.length()>0){//there is content to normalize
// retVal = this.replaceYinhao(retVal);
retVal = this.replaceFenhao(retVal);
retVal = this.replaceUnuseful(retVal);
retVal = this.dropUnuseful(retVal);
retVal = this.dropTableFix(retVal);
//neutralize ASC/DESC keywords that appear outside a query-final ORDER BY, e.g.
//ROW_NUMBER()OVER(PARTITION BY WORKDATE,AGENTSERIALNO DESC) AS NUM;
retVal = this.replaceASC(retVal);
retVal = this.changeAlter(retVal);
if("GreenPlum".equals(dbType)){
//GreenPlum-family cleaner
retVal = new GenericLogNormalizerGP(retVal).parser();
}else if("Oracle".equals(dbType)){
//Oracle-family cleaner
retVal = new GenericLogNormalizerOra(retVal).parser();
}else if("Teradata".equals(dbType)){
//Teradata-family cleaner
retVal = new GenericLogNormalizerTrd(retVal).parser();
}else if("DB2".equals(dbType)){
//DB2-family cleaner
//TODO not implemented; fall back to the GreenPlum cleaner for now
retVal = new GenericLogNormalizerGP(retVal).parser();
}
}
return retVal.trim();
}
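// Minimal usage sketch (hypothetical input):
//   String cleaned = new GenericLogNormalizer()
//       .normalizer("select a from t order by a desc; -- tail comment", "Oracle");
//   => upper-cased, the -- comment stripped, DESC removed, then the Oracle cleaner applied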
private String replaceUnuseful(String retVal) {
Pattern pat = Pattern.compile("--'\\s*\\|\\|");
Matcher mat = pat.matcher(retVal);
while (mat.find()) {
String s = mat.group();
String _s = s.replaceAll("--", "");
retVal = retVal.replace(s, _s);
}
pat = Pattern.compile("FORMAT\\s*'--.*?'");
mat = pat.matcher(retVal);
while (mat.find()) {
String s = mat.group();
String _s = s.replaceAll("--", "");
retVal = retVal.replace(s, _s);
}
return retVal;
}
private String replaceFenhao(String str) {
//replace string literals made only of spaces/digits/semicolons/pipes/% with the
//placeholder 'fenhao' ("fenhao" = semicolon) so a later split on ';' cannot break a literal
Pattern pat = Pattern.compile("'(\\s|\\d|;|\\||%)*(;)+(\\s|\\d|;|\\||%)*'");
Matcher mat = pat.matcher(str);
if (mat.find()) {
str = mat.replaceAll("\\'fenhao\\'");
}
return str ;
}
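// e.g. the literal "' ; ; '" becomes "'fenhao'" so a later split on ';' cannot
// break apart a string constant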
private String dropUnuseful(String retVal) {
Pattern p = Pattern.compile("(?ms)(--.*?$)|(/\\*.*?\\*/)");
retVal = p.matcher(retVal).replaceAll(""); //strip /*...*/ block comments and -- line comments
log.info("stripped /*...*/ block comments and -- line comments");
return retVal;
}
private String dropTableFix(String str) {
//strip physical partition suffixes (_1_PRT_P... / _1_PRT_SDB...) so the logical table name remains
Pattern pat = Pattern.compile("(((_1_PRT_P)|(_1_PRT_SDB))_PARTITION_(\\$)(\\w)*(\\s))");
Matcher mat = pat.matcher(str);
if (mat.find()) {
str = mat.replaceAll(" ");
}
pat = Pattern.compile("(((_1_PRT_P)|(_1_PRT_SDB))_PARTITION_(\\$\\{)(\\s)*(\\w)*(\\s)*(\\})(\\s))");
mat = pat.matcher(str);
if (mat.find()) {
str = mat.replaceAll(" ");
}
pat = Pattern.compile("(((_1_PRT_P)|(_1_PRT_SDB))_PARTITION_(\\d)+(\\s))");
mat = pat.matcher(str);
if (mat.find()) {
str = mat.replaceAll(" ");
}
return str;
}
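// e.g. "TBL_1_PRT_SDB_PARTITION_20230101 " collapses to "TBL " so the physical
// partition suffix no longer hides the logical table name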
private String replaceASC(String str) {
Pattern pat = Pattern.compile("(\\bASC\\b)|(\\bDESC\\b)");
Matcher mat = pat.matcher(str);
if (mat.find()) {
str = mat.replaceAll("");
}
return str;
}
private String changeAlter(String str){
//rewrite "ALTER TABLE tgt EXCHANGE PARTITION p WITH TABLE src;" as
//"ALTER TABLE src RENAME TO tgt;" so the parser sees a statement it supports
Pattern pat = Pattern.compile("(?ms)(ALTER)(\\s)+(TABLE)(\\s)+(\\w)+(\\.)*(\\w)*(\\s)*" +
"(EXCHANGE)(\\s)*(PARTITION)(\\s)+(\\w)+(\\s)+(WITH)(\\s)+" +
"(TABLE)(\\s)+(\\w)+(\\.)*(\\w)*(\\s)*;");
Matcher mat = pat.matcher(str);
String dest_table ="";
String source_table ="";
while(mat.find()){
String sen = mat.group();
dest_table = sen.substring(sen.indexOf("ALTER")+"ALTER".length(),sen.indexOf("EXCHANGE")).trim();
dest_table = dest_table.substring("table".length()).trim();
source_table = sen.substring(sen.indexOf("WITH")+"WITH".length(),sen.indexOf(";")).trim();
source_table = source_table.substring("table".length()).trim();
str=str.replace(sen,"ALTER TABLE "+source_table+" RENAME TO "+ dest_table+" ;");
}
return str;
}
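// Illustrative rewrite (hypothetical SQL):
//   in : ALTER TABLE DB.TGT EXCHANGE PARTITION P1 WITH TABLE DB.SRC;
//   out: ALTER TABLE DB.SRC RENAME TO DB.TGT ;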
public static void main(String[] args) {
//ad-hoc harness for the TRIM(COALESCE(...)) regex below
// String sql = "COALESCE(TRIM(A.COD_ACCT_NO_BENEF)||TRIM(CAST(A.CTR_SWP_INST_NO AS INTEGER)),'')";
String sql = "TRIM(COALESCE(t2.brch_no),'')";
Pattern p = Pattern.compile("(\\s*)(TRIM)(\\s*)\\((\\w+)\\(((\\w|\\.)+)\\)(\\s*)(,(\\s*)'')(\\s*)\\)");
// sql = p.matcher(sql).replaceAll(" ");
// System.out.println(sql.trim());
Matcher mat = p.matcher(sql);
while(mat.find()){
System.out.println(mat.group());
System.out.println("1="+mat.group(1));
System.out.println("2="+mat.group(2));
System.out.println("3="+mat.group(3));
System.out.println("4="+mat.group(4));
System.out.println("5="+mat.group(5));
System.out.println("6="+mat.group(6));
System.out.println("7="+mat.group(7));
System.out.println("8="+mat.group(8));
System.out.println("9="+mat.group(9));
}
}
}

95
src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizerGP.java

@ -0,0 +1,95 @@
package com.guozhi.bloodanalysis.parser.clean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GenericLogNormalizerGP {
private String retVal;
public GenericLogNormalizerGP (String retVal){
this.retVal = retVal;
}
public String parser(){
dropgangt();
dropInvSql();
replaceSpecial();
setSpecialFun(); //rewrite DATE literals and casts as DATETIME
return retVal;
}
private void dropInvSql(){
/**
 * strip GROUP BY / DISTRIBUTED BY (...) clauses; the earlier pattern was (GROUP|DISTRIBUTED)(\s)+(BY)*(.*?)\)
 */
Pattern pat = Pattern.compile("(?ms)(GROUP|DISTRIBUTED)(\\s)+(BY)(\\s)*\\((.*?)\\)");
retVal = pat.matcher(retVal.toUpperCase()).replaceAll("");
//mask the bare ROW keyword with a placeholder token
pat = Pattern.compile("\\bROW\\b");
Matcher mat = pat.matcher(retVal);
if (mat.find()) {
retVal = mat.replaceAll("ROW_SYMBOLYANG1111");
}
}
private void dropgangt() {
//drop backslash-escaped artifacts ("\\WORD ...") running to end of line, leaving a newline
Pattern pat = Pattern.compile("(?ms)\\\\\\\\[a-zA-Z]+([a-zA-Z]|\\s|\\\\\\\\)*?$");
Matcher mat = pat.matcher(retVal);
if(mat.find()){
retVal = mat.replaceAll("\n");
}
}
private void replaceSpecial() {
/**
 * replace remaining ${var} placeholders
 */
Pattern pat = Pattern.compile("\\$\\{[\\w]*\\}");
Matcher mat = pat.matcher(retVal);
while(mat.find()){
retVal = retVal.replace(mat.group(),"defaultMODEL_"+mat.group().substring(2,mat.group().length() - 1 ));
}
/**
 * replace any remaining bare $
 */
retVal = retVal.replace("$", "defaultMODEL_");
/**
 * strip special symbols such as % and ?
 */
retVal=retVal.replace("%", "/");
retVal=retVal.replace("?", " ");
}
private void setSpecialFun(){
//(date(... --- (date( ...
retVal = retVal.replaceAll("(\\()(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*\\(", "(datetime(");
//(date'...' ---(date '...'
retVal = retVal.replaceAll("(\\()(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*'", "(datetime'");
/**
 * arithmetic and comparison operators must be covered too
 */
//,date(... --- ,(date (...
retVal = retVal.replaceAll(",(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*\\(", ",datetime(");
//,date'... --- ,date '...
retVal = retVal.replaceAll(",(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*'", ",datetime'");
retVal = retVal.replaceAll("=(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*\\(", "=datetime(");
retVal = retVal.replaceAll("=(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*'", "=datetime'");
retVal = retVal.replaceAll("-(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*\\(", "-datetime(");
retVal = retVal.replaceAll("-(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*'", "-datetime'");
//keep the original comparison operator via $1 (previously every operator collapsed to ">")
retVal = retVal.replaceAll("(>|<|>=|<=)(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*\\(", "$1datetime(");
retVal = retVal.replaceAll("(>|<|>=|<=)(\\s)*(d|D)(a|A)(t|T)(e|E)(\\s)*'", "$1datetime'");
// date ... ---
//retVal = retVal.replaceAll("(\\s)+(d|D)(a|A)(t|T)(e|E)(\\s)+", " datetime ");
// date'... --- date '...
retVal = retVal.replaceAll("(\\s)+(d|D)(a|A)(t|T)(e|E)(\\s)*'", " datetime'");
// date(... --- date (...
retVal = retVal.replaceAll("(\\s)+(d|D)(a|A)(t|T)(e|E)(\\s)*\\(", " datetime(");
}
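// Illustrative rewrites performed above (DATE pushed to DATETIME):
//   "WHERE DT = DATE '2023-01-01'" -> "WHERE DT =datetime'2023-01-01'"
//   ", DATE(COL)"                  -> ",datetime(COL)"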
}

74
src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizerOra.java

@ -0,0 +1,74 @@
package com.guozhi.bloodanalysis.parser.clean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GenericLogNormalizerOra {
private String retVal;
public GenericLogNormalizerOra (String retVal){
this.retVal = retVal;
}
public String parser(){
dropInvSql();
replaceSpecial();
return retVal;
}
private void replaceSpecial() {
/**
 * replace remaining ${var} placeholders
 */
Pattern pat = Pattern.compile("\\$\\{[\\w]*\\}");
Matcher mat = pat.matcher(retVal);
while(mat.find()){
retVal = retVal.replace(mat.group(),"defaultMODEL_"+mat.group().substring(2,mat.group().length() - 1 ));
}
/**
 * replace any remaining bare $
 */
retVal=retVal.replace("$", "defaultMODEL_");
/**
 * strip special symbols such as %
 */
retVal=retVal.replace("%", "/");
}
private void dropInvSql(){
/*
Pattern p = Pattern.compile("((\\s)+(SET).*(\\s)+)");
retVal = p.matcher(retVal).replaceAll("");
*/
/**
 * strip GROUP BY / DISTRIBUTED BY (...) clauses
 */
Pattern p = Pattern.compile("(?ms)(GROUP|DISTRIBUTED)(\\s)+(BY)(\\s)*\\((.*?)\\)");
retVal = p.matcher(retVal.toUpperCase()).replaceAll("");
//strip SQL*Plus directives and call-outs the SQL parser cannot handle
p = Pattern.compile("((WHENEVER(\\s*)SQLERROR)(\\s)*.*)");
retVal = p.matcher(retVal).replaceAll("");
p = Pattern.compile("((SPOOL)(\\s)*.*)");
retVal = p.matcher(retVal).replaceAll("");
p = Pattern.compile("((COMMIT)(\\s)*;)");
retVal = p.matcher(retVal).replaceAll("");
p = Pattern.compile("CALL(\\s)+(.*?)\\);");
retVal = p.matcher(retVal.toUpperCase()).replaceAll("");
p = Pattern.compile("((EXIT)(\\s)*;)");
retVal = p.matcher(retVal.toUpperCase()).replaceAll(" ");
p = Pattern.compile("(\\s*)(CONNECT)(\\s*)(.*?)(/|\\\\)(.*?)(/|\\\\)(@)(.*?);");
retVal = p.matcher(retVal.toUpperCase()).replaceAll(" ");
//cut the script at the trailing heredoc EOF marker
if(retVal.lastIndexOf("EOF")!=-1){
retVal = retVal.substring(0,retVal.lastIndexOf("EOF")).trim();
}
}
}

115
src/main/java/com/guozhi/bloodanalysis/parser/clean/GenericLogNormalizerTrd.java

@ -0,0 +1,115 @@
package com.guozhi.bloodanalysis.parser.clean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GenericLogNormalizerTrd {
private String retVal;
public GenericLogNormalizerTrd (String retVal){
this.retVal = retVal;
}
public String parser(){
dropgangt();
replaceSpecial();
dropNoStdSql();
return retVal;
}
private void dropgangt() {
Pattern pat = Pattern.compile("(?ms)\\\\\\\\[a-zA-Z]+([a-zA-Z]|\\s|\\\\\\\\)*?$");
Matcher mat = pat.matcher(retVal);
while(mat.find()){
retVal = mat.replaceAll("\n");
}
}
private void replaceSpecial() {
/**
 * replace ${a} <op> ${b} comparisons with a tautology
 */
Pattern pat = Pattern.compile("\\$\\{[\\w]*\\}\\s*(>|=|<|LIKE|>=|=>|<=|=<|<>)\\s*\\$\\{[\\w]*\\}");
Matcher mat = pat.matcher(retVal);
while(mat.find()){
retVal = retVal.replace(mat.group(), "'1' = '1'");
}
/**
 * then neutralize remaining <op> ${var} comparisons
 */
pat = Pattern.compile("(>|=|<|LIKE|>=|=>|<=|=<|<>)\\s*\\$\\{[\\w]*\\}");
mat = pat.matcher(retVal);
while(mat.find()){
retVal = retVal.replace(mat.group()," = '1'");
}
/**
 * drop any remaining ${var} placeholders
 */
pat = Pattern.compile("\\$\\{[\\w]*\\}");
mat = pat.matcher(retVal);
while(mat.find()){
retVal = retVal.replace(mat.group(),"");
}
/**
 * neutralize ';' inside string literals so splitting on ';' stays safe
 * (assumption: the replacement swaps in the full-width ';'; the extracted
 * original read as a no-op replace(";", ";"))
 */
pat = Pattern.compile("'(.*?);(.*?)'");
mat = pat.matcher(retVal);
while (mat.find()) {
String s = mat.group();
String _s = s.replace(";", "；");
retVal = retVal.replace(s, _s);
}
/**
 * strip special symbols such as % and ?
 */
retVal=retVal.replace("%", "/");
retVal=retVal.replace("?", " ");
}
private void dropNoStdSql(){
//keep only CREATE / SELECT / INSERT / ALTER statements
String pattern = "^CREATE.*|^SELECT.*|^INSERT.*|^ALTER.*";
Pattern pat = Pattern.compile(pattern);
String[] retVals = retVal.split(";");
retVal = "";
for (String _retVal : retVals) {
_retVal = _retVal.toUpperCase().trim();
_retVal = dorpLockingSql(_retVal);
Matcher mat = pat.matcher(_retVal);
if (mat.find()) {
retVal += _retVal + ";";
}
}
}
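// e.g. "DELETE FROM T;INSERT INTO T SELECT * FROM S;" keeps only
// "INSERT INTO T SELECT * FROM S;" after the statement-type filter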
/**
 * Scripts from 广农商 contain this construct (20170216):
 * strip the leading LOCKING TABLE ... FOR ACCESS wrapper
 * @param val
 */
private String dorpLockingSql(String val){
String res = null;
String startPattern = "^(LOCKING)\\s*(TABLE)\\s.*(FOR\\s*ACCESS)\\s+[\\s\\S]*";
if(val.matches(startPattern)) {
String keyPat = "FOR\\s*ACCESS?";
Matcher m = Pattern.compile(keyPat).matcher(val);
int keyEnd = 0;
if(m.find()) {
keyEnd=m.end();
res = val.substring(keyEnd);
}
}
return res == null ? val : res.trim();
}
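// e.g. "LOCKING TABLE DB.T FOR ACCESS SELECT * FROM DB.T" is reduced to
// "SELECT * FROM DB.T" before the statement-type filter in dropNoStdSql runs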
}

62
src/main/java/com/guozhi/bloodanalysis/parser/common/AlterParser.java

@ -0,0 +1,62 @@
package com.guozhi.bloodanalysis.parser.common;
import java.util.List;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import gudusoft.gsqlparser.nodes.TAlterTableOptionList;
import gudusoft.gsqlparser.nodes.TObjectName;
import gudusoft.gsqlparser.stmt.TAlterTableStatement;
/**
* Created by Walk.Lai on 2015/8/29.
*/
public class AlterParser {
private TAlterTableStatement mStmt;//table, view
private List<KColumn> mColumns;
private VTable mTable;
private ParserContext mParserContext;
public AlterParser(TAlterTableStatement alterSql,ParserContext context){
mStmt =alterSql;
this.mParserContext=context;
}
public void parse() {
//alter table exchange partition with table
// -- insert the new data into the target table by exchanging partitions
// ALTER TABLE IML_DB.EV_EVENT_PROD_RELA EXCHANGE SUBPARTITION P_1008_1111 WITH TABLE IML_DB.VT_NEW_1008;
/*
e.g. ALTER TABLE $PDM_SCH.PD_BCC_3F_TRAN_INFO RENAME TO PD_BCC_3F_TRAN_INFO_EXCHANGE2;
     ALTER TABLE $PDM_SCH.PD_BCC_3F_TRAN_INFO_EXCHANGE RENAME TO PD_BCC_3F_TRAN_INFO;
tempTable   -- $PDM_SCH.PD_BCC_3F_TRAN_INFO_EXCHANGE
targetTable -- PD_BCC_3F_TRAN_INFO
*/
String tempTableName = null;
String targetTableName = null;
TObjectName obj = mStmt.getTableName();
if (obj!=null) {
tempTableName = obj.toString();
}
TAlterTableOptionList oplist = mStmt.getAlterTableOptionList();
if (oplist!=null) {
for (int i = 0; i < oplist.size(); i++) {
String option = oplist.getAlterTableOption(i).toString();
if (option.contains("RENAME")&&option.contains(" TO ")) {
targetTableName = option.substring(option.indexOf(" TO ")+" TO ".length()).trim();
targetTableName = targetTableName.replace("SYMBOLALTERPOINT", ".");
// targetTableName = this.addAlterSchema(tempTableName,targetTableName);
break;
}
}
}
//TODO the extracted tempTableName/targetTableName are not yet written into any lineage result
}
public List<KColumn> getParseResult() {
return null;
}
}

128
src/main/java/com/guozhi/bloodanalysis/parser/common/AsTableParser.java

@ -0,0 +1,128 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import com.guozhi.bloodanalysis.parser.utils.KColumnProvider;
import com.guozhi.bloodanalysis.parser.utils.KDatabaseProvider;
import com.guozhi.bloodanalysis.parser.utils.SpUtils;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import gudusoft.gsqlparser.nodes.TTable;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
@Component
@NoArgsConstructor
public class AsTableParser extends BaseParser{
private TTable tTable;
private boolean mKeepConstant=false;//whether to keep constant values
private ParserContext mParserContext;
private VTable vTable;//the whole query treated as one virtual table
@Autowired
ApplicationContext applicationContext;
@Autowired
KDatabaseProvider kDatabaseProvider;
public AsTableParser(TTable tTable,ParserContext context) {
this.tTable = tTable;
vTable = new VTable(SpUtils.generateId(tTable.toString(),""));
this.mParserContext=context;
}
public void parse() {
//TODO convert CTEs into table relations
mParserContext.setCurrentTable(vTable);
VTable sVTable = analyseTable(tTable);
sVTable.addColumns(new KColumnProvider().getColumns(sVTable, null, mParserContext));
vTable.addTable(sVTable);
mParserContext.setCurrentTable(vTable);
vTable.addColumns(sVTable.getColumns());
mParserContext.addVTable( vTable);
}
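// Illustrative flow: for "CREATE TABLE t AS src_tbl", analyseTable wraps src_tbl in a
// VTable, KColumnProvider supplies its columns, and those columns become the columns
// of the virtual table standing for the whole AS-table expression.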
private VTable analyseTable(TTable table){
String fullName=table.getFullName();
String tableAlias=table.getAliasName();
VTable vTable=null;
if(StringUtils.isBlank(fullName)){//subquery: fullName is blank
vTable=new VTable(SpUtils.generateId(table.toString(),tableAlias));
}else{//plain table name
String[] nameInfo=SpUtils.analyseTableName(fullName);
vTable=new VTable(nameInfo[2],nameInfo[2]);
vTable.db=mParserContext.getDefaultDb();
vTable.schema=mParserContext.getDefaultSchema();
}
if(StringUtils.isNotBlank(tableAlias)){
vTable.setAlias(tableAlias);
}
//a table we have already seen or created
VTable createdTable = mParserContext.findExistedTable(vTable.getFullName(),true);
if(createdTable!=null) {
createdTable.setAlias(vTable.alias);
return createdTable;
}
switch (table.getTableType()) {
case objectname:
//a real database table; look up the owning system (there may be several)
kDatabaseProvider.getDatabase(vTable, mParserContext);
break;
case subquery:
TSelectSqlStatement subQuery = table.getSubquery();
// VTable currentTable=mParserContext.getCurrentTable();
SelectParser sp = applicationContext.getBean(SelectParser.class);
sp.initWithTable(subQuery,mParserContext,vTable,mKeepConstant);
sp.parse();
// mParserContext.setCurrentTable(vTable);
//the dependency wiring below duplicated work done elsewhere, hence commented out
// vTable.addColumns(sp.getParseResult());
// vTable.getFromTables().addAll(sp.vTable.getFromTables());
break; //subquery
case function:
// table.getFuncCall();
break;
case tableExpr:break;
case rowList:break;
//SQL Server only
case containsTable:break;
case freetextTable:break;
default:break;
}
mParserContext.addVTable(vTable);
return vTable;
}
public List<KColumn> getParseResult() {
List<KColumn> ret=new ArrayList<>();
for(KColumn column: vTable.getColumns()){
if(column==null){
ret.add(column);
}else {
if (column.isStar) {
ret.addAll(column.getRefColumns());
} else {
ret.add(column);
}
}
}
return ret;
}
}

202
src/main/java/com/guozhi/bloodanalysis/parser/common/BaseParser.java

@ -0,0 +1,202 @@
package com.guozhi.bloodanalysis.parser.common;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class BaseParser {
public void getResultParser(ParserContext mParserContext, VTable tTable, List<KColumn> mColumns){
VTable currTable = mParserContext.getCurrentTable();
if(currTable==null){
return;
}
Map<String,VTable> tables = new HashMap<String, VTable>();
if(mColumns!=null){
//first sweep away empty table/column objects and resolve each target down to its ultimate source columns
cleanNullTabCol(mParserContext, tTable, mColumns);
//then verify every source table of the current SQL appears in the result; if one is missing,
//create a default column so the target-to-source table relation is still recorded
List<VTable> sTables = currTable.getFromTables();
for (VTable sTable : sTables) {
childTable(mParserContext, tTable, sTable, mColumns, tables);
}
}
}
/***
 * drop target columns without sources and resolve targets to their origin source columns
 * @param mParserContext
 * @param tTable
 * @param mColumns
 */
private void cleanNullTabCol(ParserContext mParserContext,VTable tTable,List<KColumn> mColumns){
//target columns that need deleting
List<KColumn> delTCols = new ArrayList<KColumn>();
for (int i = 0; i < mColumns.size(); i++) {
KColumn tCol = mColumns.get(i);
List<KColumn> sCols = tCol.getRefColumns();
//source columns to delete
List<KColumn> delSCols = new ArrayList<KColumn>();
//columns promoted from deeper levels
List<KColumn> addSCols = new ArrayList<KColumn>();
Map<String,KColumn> sMap = new HashMap<String, KColumn>();
//recurse through the children to find the ultimate source columns
childColumn(sCols, delSCols,addSCols,sMap,false);
//splice in the promoted columns
if(addSCols.size()>0){
for (KColumn kColumn : addSCols) {
sCols.add(kColumn);
}
}
//remove the blanked-out entries
for (int j = 0; j < delSCols.size(); j++) {
sCols.remove(delSCols.get(j));
}
if(sCols.size()<=0){
delTCols.add(tCol);
}
}
//drop target entries left without any source
if(delTCols.size()>0){
for (int i = 0; i < delTCols.size(); i++) {
mColumns.remove(delTCols.get(i));
}
}
}
private void childColumn(List<KColumn> sCols,List<KColumn> delSCols,List<KColumn> addSCols,Map<String,KColumn> sMap,boolean isChild){
for (KColumn sCol : sCols) {
//a table alias without a table name means a subquery (select t.col from (select col from table) t )
List<KColumn> _sCols = sCol.getRefColumns();
//still has sources: keep descending until the bottom level to find the origin column
if(_sCols.size()>0){
isChild = true;
delSCols.add(sCol);
childColumn(sCol.getRefColumns(), delSCols, addSCols,sMap,isChild);
}else{
if(sCol.tableName!=null){
if(isChild){
String colKey = sCol.db+"."+sCol.schema+"."+sCol.tableName+"."+sCol.columnName;
if(sMap.containsKey(colKey)){
delSCols.add(sCol);
}else{
String pattern = "(^'.*'$)|(^\\d+$)";
Pattern pat = Pattern.compile(pattern);
Matcher mat = pat.matcher(sCol.columnName.toUpperCase().trim());
if(mat.find()){
delSCols.add(sCol);
}else{
sMap.put(colKey, sCol);
addSCols.add(sCol);
}
}
}
}else{
delSCols.add(sCol);
}
}
}
}
private void childTable(ParserContext mParserContext,VTable tTable,VTable sTable,List<KColumn> mColumns,Map<String,VTable> tables){
if(sTable.isRealTable()){
//check whether this source table already appears in the result
isSourceTableExis(tables, sTable, tTable, mParserContext, mColumns);
}else{
List<VTable> sTbles = sTable.getFromTables();
for (VTable sCTable : sTbles) {
//recurse into the virtual table's own sources
childTable(mParserContext, tTable, sCTable, mColumns, tables);
}
}
}
/**
 * check whether a source table is already present in the result
 * @param tables map of source tables already verified
 * @param sTable source table
 * @param tTable target table
 * @param mParserContext sql context
 * @param mColumns target table columns
 */
private void isSourceTableExis(Map<String,VTable> tables,VTable sTable,VTable tTable,ParserContext mParserContext,List<KColumn> mColumns){
//source table key
String tabKey = sTable.db+"."+sTable.schema+"."+sTable.name;
boolean isTableExis = false;
//already verified once: bail out
if(tables.containsKey(tabKey)){
return;
}
//walk the result columns to see whether this source table has been recorded
for (int i = 0; i < mColumns.size(); i++) {
KColumn tCol = mColumns.get(i);
List<KColumn> sCols = tCol.getRefColumns();
//recurse through the reference chain
isTableExis = childColumn(sCols, tabKey);
if(isTableExis){
break;
}
}
if(sTable.isCreated()){
//if the source table is missing from the result, link source and target through a default column
if(!isTableExis){
//target side
KColumn tCol = new KColumn();
//TODO wxl analyse the column name? lineage cannot reach column level here, so keep the result at table level
tCol.columnName = "default_col";
tCol.alias = "默认字段";
tCol.columnPrefix = tTable.getName();
tCol.db=tTable.db;
tCol.schema=tTable.schema;
//temporary or persistent table
tCol.isEvTable = tTable.isCreated();
tCol.tableName=tTable.getName();
tCol.export=true;
//source side
KColumn sCol = new KColumn();
//TODO wxl analyse the column name? lineage cannot reach column level here, so keep the result at table level
sCol.columnName = "default_col";
sCol.alias = "默认字段";
sCol.columnPrefix = sTable.getName();
sCol.db=sTable.db;
sCol.schema=sTable.schema;
sCol.tableName=sTable.getName();
sCol.export=true;
//temporary or persistent table
sCol.isEvTable = sTable.isCreated();
tCol.addRefColumn(sCol);
mColumns.add(tCol);
tables.put(tabKey, sTable);
}
}
}
private boolean childColumn(List<KColumn> sCols,String tabKey){
for (KColumn sCol : sCols) {
//key of the table owning this source column
String tabColKey = sCol.db+"."+sCol.schema+"."+sCol.tableName;
if(tabColKey.toUpperCase().equals(tabKey.toUpperCase())){
return true;
}else{
//not at this level: keep searching the next level down
if(sCol.getRefColumns().size()>0){
//bug fix: the recursive result used to be discarded
if(childColumn(sCol.getRefColumns(), tabKey)){
return true;
}
}
}
}
return false;
}
}

194
src/main/java/com/guozhi/bloodanalysis/parser/common/CreateParser.java

@ -0,0 +1,194 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.guozhi.bloodanalysis.parser.utils.SpUtils;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.nodes.TColumnDefinition;
import gudusoft.gsqlparser.nodes.TColumnDefinitionList;
import gudusoft.gsqlparser.nodes.TTable;
import gudusoft.gsqlparser.stmt.TCreateTableSqlStatement;
import gudusoft.gsqlparser.stmt.TCreateViewSqlStatement;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
public class CreateParser extends BaseParser {
private TCustomSqlStatement mCreateSql;//table, view
private List<KColumn> mColumns;
/** current target table */
private VTable mTable;
private ParserContext mParserContext;
public CreateParser(TCustomSqlStatement createSql,ParserContext context){
mCreateSql=createSql;
this.mParserContext=context;
}
public void parse() {
if(mCreateSql instanceof TCreateTableSqlStatement){
TCreateTableSqlStatement ctSql=(TCreateTableSqlStatement)mCreateSql;
String tableName=ctSql.getTableName().toString();
//analyse the table name, e.g. db.schema.table or schema.table
String[] nameInfo= SpUtils.analyseTableName(tableName);
//initialise a table using the bare name as both name and alias
mTable=new VTable(nameInfo[2],nameInfo[2]);
mTable.db=mParserContext.getDefaultDb();
mTable.schema=mParserContext.getDefaultSchema();
//flag whether this is a temporary table, e.g. CREATE MULTISET|VOLATILE creates one
mTable.setCreated(isEvTable(mCreateSql.toString()));
TColumnDefinitionList cdList = ctSql.getColumnList();
List<KColumn> definedColumns=new ArrayList<KColumn>();//columns defined in the DDL
for(int i=0;i<cdList.size();i++){
TColumnDefinition colDef=cdList.getColumn(i);
KColumn col=new KColumn();
col.columnName=colDef.getColumnName().getColumnNameOnly();
col.columnPrefix =tableName;
col.alias=col.columnName;
col.db=mTable.db;
col.schema=mTable.schema;
col.tableName=mTable.getName();
//temporary or persistent table
col.isEvTable = mTable.isCreated();
definedColumns.add(col);
}
TSelectSqlStatement subQuery=ctSql.getSubQuery();
List<KColumn> selectColumns=null;//result columns from the subquery
if(subQuery!=null){
SelectParser sp=new SelectParser(subQuery,mParserContext);
sp.parse();
selectColumns=sp.getParseResult();
}else{
TTable tTable = ctSql.getAsTable();
if(tTable!=null){
System.out.println("此处可能为AsTable,暂未考虑如何获取:"+ctSql.getAsTable());
// System.out.println("sql:"+ctSql);
AsTableParser asTable = new AsTableParser( tTable, mParserContext);
//设置上下文当前表,解析源表和目标表
asTable.parse();
selectColumns = asTable.getParseResult();
}
}
if(selectColumns!=null&&selectColumns.size()>0) {
List<KColumn> sourceColumns=new ArrayList<KColumn>();
for(KColumn c: selectColumns){
if(c!=null&&c.isStar&&c.getRefColumns().size()>0){
sourceColumns.addAll(c.getRefColumns());
}else{
sourceColumns.add(c);
}
}
if(definedColumns.size()>0) {
if(definedColumns.size()!=sourceColumns.size()){
//column counts don't match: the SQL is malformed
}else{
for(int i=0;i<definedColumns.size();i++){
KColumn dc=definedColumns.get(i);
KColumn sc=sourceColumns.get(i);
if(sc!=null&&sc.vColumn&&sc.getRefColumns().size()>0){
dc.addRefColumnList(sc.getRefColumns());
}else {
dc.addRefColumn(sc);
}
}
}
}else{
//no column list defined, e.g.: create table t as select a,b from c;
for (KColumn refCol : sourceColumns) {
if(refCol==null){
definedColumns.add(null);
continue;
}
// String tcName = refCol.columnName;
// if (refCol.alias != null)
// tcName = refCol.alias;
Map<String,String> map = getColNameAndAlias(refCol);
KColumn col = new KColumn();
//TODO analyse the column name?
// col.columnName = "default_col";
// col.alias = "默认字段";
col.columnName=map.get("colName");
col.alias=map.get("alias");
col.columnPrefix = tableName;
col.db=mTable.db;
col.schema=mTable.schema;
col.tableName=mTable.getName();
//temporary or persistent table
col.isEvTable = mTable.isCreated();
col.addRefColumn(refCol);
definedColumns.add(col);
}
}
}else{
//TODO no column data at all: something went wrong
}
mColumns=definedColumns;
// mTable.addColumns(definedColumns);
mTable.setRealTable(false);
//store the resolved columns on the created table
mTable.addColumns(getParseResult());
mParserContext.addCreatedTable(mTable.getFullName(), mTable);
}else if(mCreateSql instanceof TCreateViewSqlStatement){
//views are not handled yet
}
}
private Map<String,String> getColNameAndAlias(KColumn refCol){
Map<String,String> map = new HashMap<String, String>();
if(refCol.vColumn){
if(refCol.alias!=null&&refCol.columnName!=null&&!refCol.columnName.equals(refCol.alias)){ //compare by value, not by reference
map.put("colName", refCol.alias);
map.put("alias", refCol.alias);
}else{
if(refCol.getRefColumns().size()>1){
map.put("colName", "default_col");
map.put("alias", "默认字段");
}else{
KColumn c = new KColumn();
if(refCol.getRefColumns().size()>0){
c = refCol.getRefColumns().get(0);
}
map.put("colName", c.columnName == null ? "default_col" : c.columnName);
map.put("alias", c.alias == null ? "默认字段" : c.alias);
}
}
}else{
map.put("colName", refCol.columnName == null ? "default_col" : refCol.columnName);
map.put("alias", refCol.alias == null ? "默认字段" : refCol.alias);
}
return map;
}
/**
 * check whether the statement creates a persistent (non-volatile) table
 * @param retVal
 * @return
 */
private boolean isEvTable(String retVal){
boolean isEvTable = true;
//only CREATE statements carrying both VOLATILE/MULTISET keywords are treated as temporary
String pattern = "^\\s*CREATE\\s*(VOLATILE|MULTISET)\\s*(VOLATILE|MULTISET)";
Pattern pat = Pattern.compile(pattern);
Matcher mat = pat.matcher(retVal.toUpperCase().trim());
if(mat.find()){
isEvTable = false;
}
return isEvTable;
}
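// e.g. "CREATE MULTISET VOLATILE TABLE T ..." -> false (temporary table);
//      "CREATE TABLE T ..."                   -> true  (persistent table)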
public List<KColumn> getParseResult() {
super.getResultParser(mParserContext, mTable, mColumns);
return mColumns;
}
}

41
src/main/java/com/guozhi/bloodanalysis/parser/common/DeleteParser.java

@ -0,0 +1,41 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import java.util.ArrayList;
import java.util.List;
import com.guozhi.bloodanalysis.parser.utils.ExprToColumn;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import gudusoft.gsqlparser.nodes.TExpression;
import gudusoft.gsqlparser.nodes.TWhereClause;
import gudusoft.gsqlparser.stmt.TDeleteSqlStatement;
public class DeleteParser{
private TDeleteSqlStatement mDeleteStmt;
private List<KColumn> mColumns;
private ParserContext mParserContext;
public DeleteParser(TDeleteSqlStatement deleteSqlStatement,ParserContext context){
mDeleteStmt=deleteSqlStatement;
mColumns=new ArrayList<KColumn>();
this.mParserContext=context;
}
public void parse() {
String tableName=mDeleteStmt.getTargetTable().toString();
TWhereClause whereClause=mDeleteStmt.getWhereClause();
if(whereClause!=null) {
TExpression expr = whereClause.getCondition();
new ExprToColumn(mDeleteStmt, mParserContext, false).exprVisit(expr);
}
//TODO delete lineage is not recorded yet: tableName goes unused and mColumns stays empty
}
public List<KColumn> getParseResult() {
return mColumns;
}
}

50
src/main/java/com/guozhi/bloodanalysis/parser/common/DropParser.java

@ -0,0 +1,50 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import java.util.List;
import com.guozhi.bloodanalysis.parser.utils.SpUtils;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import lombok.extern.slf4j.Slf4j;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.stmt.TDropTableSqlStatement;
import gudusoft.gsqlparser.stmt.TDropViewSqlStatement;
@Slf4j
public class DropParser{
private TCustomSqlStatement mDropStmt;
private ParserContext mParserContext;
public DropParser(TCustomSqlStatement stmt,ParserContext context){
this.mDropStmt=stmt;
this.mParserContext=context;
}
public void parse() {
if(mDropStmt instanceof TDropTableSqlStatement){
TDropTableSqlStatement dropTableStmt=(TDropTableSqlStatement)mDropStmt;
String fullname=dropTableStmt.getTableName().toString();
String tableName= SpUtils.removeQuote(fullname);
VTable t=mParserContext.findExistedTable(tableName,true);
if(t==null){
//nothing registered under this name
}
mParserContext.dropExistTable(tableName);
//TODO this table can probably be ignored from here on
}else if( mDropStmt instanceof TDropViewSqlStatement){
//dropped views are not tracked yet
}
public List<KColumn> getParseResult() {
return null;
}
}

211
src/main/java/com/guozhi/bloodanalysis/parser/common/InsertParser.java

@ -0,0 +1,211 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import java.util.ArrayList;
import java.util.List;
import com.guozhi.bloodanalysis.parser.utils.ExprToColumn;
import com.guozhi.bloodanalysis.parser.utils.KColumnProvider;
import com.guozhi.bloodanalysis.parser.utils.SpUtils;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import gudusoft.gsqlparser.nodes.TExpression;
import gudusoft.gsqlparser.nodes.TMultiTarget;
import gudusoft.gsqlparser.nodes.TMultiTargetList;
import gudusoft.gsqlparser.nodes.TObjectNameList;
import gudusoft.gsqlparser.nodes.TResultColumn;
import gudusoft.gsqlparser.nodes.TResultColumnList;
import gudusoft.gsqlparser.stmt.TInsertSqlStatement;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
@EqualsAndHashCode(callSuper = true)
@Slf4j
@Component
@NoArgsConstructor
@Data
public class InsertParser extends BaseParser{
private List<KColumn> mColumns;
private TInsertSqlStatement mInsertSql;
private ParserContext mParserContext;
private VTable tTable = null;
@Autowired
private ApplicationContext applicationContext;
public InsertParser(TInsertSqlStatement insertSqlStatement,ParserContext context) {
mInsertSql = insertSqlStatement;
this.mParserContext=context;
}
public void parse() {
//INSERT INTO table VALUES (v1, v2, ...)
//INSERT INTO table_name (col1, col2, ...) VALUES (v1, v2, ...)
//insert into table_name select ... from ...
//
String targetTableName = mInsertSql.getTargetTable().getFullName();
// System.out.println(mInsertSql.toString());
String[] nameInfo= SpUtils.analyseTableName(targetTableName);
tTable=new VTable(nameInfo[2],nameInfo[2]);
tTable.db=mParserContext.getDefaultDb();
tTable.schema=mParserContext.getDefaultSchema();
boolean exportColumn=false;
VTable createdTable=mParserContext.findExistedTable(tTable.getFullName(),false);
//a temporary table created earlier in this parse is reused here directly;
//the temp table carried no columns when first created while this INSERT does,
//so reusing it blindly can leave the temp table short of columns
if(createdTable==null){
exportColumn=true;
}else{
tTable = createdTable;
}
//target table columns
TObjectNameList objectNameList = mInsertSql.getColumnList();
mColumns=new ArrayList<KColumn>();
if (objectNameList != null) {
for (int i = 0; i < objectNameList.size(); i++) {
KColumn col = new KColumn();
//TODO analyse the column name?
col.columnName = objectNameList.getObjectName(i).toString();
col.alias = col.columnName;
col.columnPrefix = targetTableName;
col.db=tTable.db;
col.schema=tTable.schema;
col.tableName=tTable.getName();
col.isEvTable = exportColumn ? true : createdTable.isCreated();//real table or not
col.export=exportColumn;
//grc record-level flag
col.etlFlag="12";
mColumns.add(col);
}
}else{
//this form maps columns purely by position, never by name, so column order matters
KColumnProvider kColumnProvider = applicationContext.getBean(KColumnProvider.class);
List<KColumn> columns=kColumnProvider.getColumns(null,targetTableName, mParserContext);
if(columns!=null) {
mColumns.addAll(columns);
}
}
List<KColumn> sourceColumns=new ArrayList<KColumn>();
switch (mInsertSql.getValueType()) {
case 1:
TMultiTargetList valueList = mInsertSql.getValues();
for (int i = 0; i < valueList.size(); i++) {
TMultiTarget value = valueList.getMultiTarget(i);
//TODO handle the different value types differently
TResultColumnList resultColumnList=value.getColumnList();
for (int j = 0; j < resultColumnList.size(); j++) {
TResultColumn column = resultColumnList.getResultColumn(j);
TExpression expr = column.getExpr();
//TODO currentTable may have side effects here?
sourceColumns.add(new ExprToColumn(mInsertSql, mParserContext, false).exprVisit(expr));
}
}
break;
case 2://SubQuery
TSelectSqlStatement subQuery = mInsertSql.getSubQuery();
SelectParser sp = applicationContext.getBean(SelectParser.class);
sp.init(subQuery,mParserContext,true);
//TODO table dependencies are parsed here; the handling when source and target tables coincide is unclear
sp.parse();
//source columns; some parse as null.null.null.AGMT_ID as AGMT_ID -- TODO investigate
List<KColumn> subQueryColumns = sp.getParseResult();
for(KColumn c: subQueryColumns){
if(c!=null&&c.isStar&&c.getRefColumns().size()>0){
sourceColumns.addAll(c.getRefColumns());
}else{
sourceColumns.add(c);
}
}
break; //case 2: subQuery
case 3:break;
case 4:break;
case 6:break;
case 5:
mInsertSql.getFunctionCall();
break;
case 7:
mInsertSql.getSetColumnValues();
break;
case 8:
mInsertSql.getRecordName();
default:
break;
}
if(mColumns.size()>sourceColumns.size()){
log.error("target column count " + mColumns.size()
+ " exceeds source column count " + sourceColumns.size());
return;
}
if(mColumns.size()==0){
log.info("target table " + targetTableName + " defines no columns; creating default columns, total: " + sourceColumns.size());
//no columns specified: default to one placeholder column per source column; any other
//mandatory columns could come from the metadata system, but that needs new development
for (int i = 0; i < sourceColumns.size(); i++) {
KColumn col = new KColumn();
//TODO analyse the column name?
col.columnName = "default_col";
col.alias = "默认字段";
col.columnPrefix = targetTableName;
col.db=tTable.db;
col.schema=tTable.schema;
col.tableName=tTable.getName();
col.isEvTable = exportColumn || createdTable.isCreated();
col.export=exportColumn;
mColumns.add(col);
}
}
//assume target and source columns correspond strictly by position
int length=mColumns.size();
//pick up etlSrcTabName from the source columns
if(mParserContext.getEtlSrcTabName() == null) {
for(int i = 0;i < length;i++){
KColumn column=mColumns.get(i);
KColumn refCol=sourceColumns.get(i);
if(column.columnName.equals(KColumn.etlSrcTabName)) {
String etlSrcColName = refCol.columnName;
etlSrcColName = etlSrcColName.replaceAll("'", "");
mParserContext.setEtlSrcTabName(etlSrcColName);
break;
}
}
}
for (int i=0;i<length;i++) {
KColumn column=mColumns.get(i);
KColumn refCol=sourceColumns.get(i);
column.setEtlFlag(mParserContext.getEtlSrcTabName());
if(refCol!=null) {
refCol.setEtlFlag(mParserContext.getEtlSrcTabName());
if(refCol.vColumn&&refCol.getRefColumns().size()>0){
column.getRefColumns().addAll(refCol.getRefColumns());
}else {
column.addRefColumn(refCol);
}
}
}
tTable.addColumns(mColumns);
}
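// Illustrative positional mapping (hypothetical SQL):
//   INSERT INTO TGT (A, B) SELECT X, Y FROM SRC;
//   => TGT.A gains a reference to SRC.X and TGT.B to SRC.Y (matched by position, not name)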
public List<KColumn> getParseResult() {
//let the base class clean the result once first
super.getResultParser(mParserContext, tTable, mColumns);
return mColumns;
}
}

141
src/main/java/com/guozhi/bloodanalysis/parser/common/ParserContext.java

@ -0,0 +1,141 @@
package com.guozhi.bloodanalysis.parser.common;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by Walk.Lai on 2015/8/1.
*/
public class ParserContext {
// tables created with CREATE statements
private Map<String, VTable> mCreatedTables = new HashMap<String, VTable>();
/** only the tables appearing in the current statement, virtual tables included */
private List<VTable> mTableInCurrentStatement = new ArrayList<VTable>();
// the virtual table formed by the current statement
private VTable mCurrentTable;
//wxl TODO for stored procedures the owning db and schema are known; for perl, python, shell and sql scripts they cannot be determined.
private String mDefaultDb;//TODO db or sys?
private String mDefaultSchema;
/**
 * grc etl source table name
 */
private String etlSrcTabName;
public VTable findExistedTable(String tableName, boolean isCopy) {
if (isCopy) {
return copyTable(mCreatedTables.get(tableName));
} else {
return mCreatedTables.get(tableName);
}
}
private VTable copyTable(VTable s) {
if (s == null) {
return null;
}
VTable t = new VTable(s.getName());
t.alias = s.alias;
t.db = s.db;
t.name = s.name;
t.schema = s.schema;
t.setCreated(s.isCreated());
t.setRealTable(s.isRealTable());
for (int i = 0; i < s.getColumns().size(); i++) {
KColumn _col = s.getColumns().get(i);
KColumn col = new KColumn();
// TODO analyse the column name?
col.columnName = _col.columnName;
col.alias = _col.alias;
col.columnPrefix = _col.columnPrefix;
col.db = _col.db;
col.schema = _col.schema;
col.tableName = _col.tableName;
col.isEvTable = _col.isEvTable;
col.export = _col.export;
t.addColumn(col);
}
return t;
}
public void dropExistTable(String tableName) {
mCreatedTables.remove(tableName);
}
public void addVTable(VTable table) {
this.mTableInCurrentStatement.add(table);
}
/**
 * tables recorded for the statement currently being parsed
 *
 * @return
 */
public List<VTable> getTableInCurrentStatement() {
return mTableInCurrentStatement;
}
public VTable getCurrentTable() {
return mCurrentTable;
}
public void setCurrentTable(VTable currentTable) {
this.mCurrentTable = currentTable;
}
public String getDefaultDb() {
return mDefaultDb;
}
public void setDefaultDb(String defaultDb) {
this.mDefaultDb = defaultDb;
}
public String getDefaultSchema() {
return mDefaultSchema;
}
public void setDefaultSchema(String defaultSchema) {
this.mDefaultSchema = defaultSchema;
}
public Map<String, VTable> getCreatedTables() {
return mCreatedTables;
}
public VTable findTableInCurrentStatement(String tableName) {
for (VTable t : mTableInCurrentStatement) {
if (t.getName().equals(tableName))
return t;
if (t.getAlias() != null) {
if (t.getAlias().equals(tableName))
return t;
}
}
return null;
}
public boolean addCreatedTable(String tableName, VTable table) {
if (mCreatedTables.get(tableName) == null) {
mCreatedTables.put(tableName, table);
return true; //bug fix: report whether the table was actually registered
}
return false;
}
public String getEtlSrcTabName() {
return etlSrcTabName;
}
public void setEtlSrcTabName(String etlSrcTabName) {
this.etlSrcTabName = etlSrcTabName;
}
}

294
src/main/java/com/guozhi/bloodanalysis/parser/common/SelectParser.java

@ -0,0 +1,294 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import java.util.ArrayList;
import java.util.List;
import com.guozhi.bloodanalysis.parser.utils.ExprToColumn;
import com.guozhi.bloodanalysis.parser.utils.KDatabaseProvider;
import com.guozhi.bloodanalysis.parser.utils.SpUtils;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import gudusoft.gsqlparser.nodes.TExpression;
import gudusoft.gsqlparser.nodes.TExpressionList;
import gudusoft.gsqlparser.nodes.TIntoClause;
import gudusoft.gsqlparser.nodes.TJoin;
import gudusoft.gsqlparser.nodes.TJoinItem;
import gudusoft.gsqlparser.nodes.TJoinItemList;
import gudusoft.gsqlparser.nodes.TJoinList;
import gudusoft.gsqlparser.nodes.TResultColumn;
import gudusoft.gsqlparser.nodes.TResultColumnList;
import gudusoft.gsqlparser.nodes.TTable;
import gudusoft.gsqlparser.nodes.TWhereClause;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
@EqualsAndHashCode(callSuper = true)
@Slf4j
@Component
@NoArgsConstructor
@Scope("prototype")
@Data
public class SelectParser extends BaseParser {
private List<KColumn> mSourceColumn = new ArrayList<KColumn>();
private TSelectSqlStatement mSelect;
private boolean mKeepConstant=false;//whether to keep constant values
private ParserContext mParserContext;
private VTable vTable;//the whole query treated as one virtual table
@Autowired
ApplicationContext applicationContext;
public SelectParser(TSelectSqlStatement statement,ParserContext context) {
mSelect = statement;
vTable=new VTable(SpUtils.generateId(statement.toString(),""));
this.mParserContext=context;
}
public SelectParser(TSelectSqlStatement statement,ParserContext context,boolean keepConstant) {
mSelect = statement;
mKeepConstant=keepConstant;
vTable=new VTable(SpUtils.generateId(statement.toString(),""));
this.mParserContext=context;
}
public void init(TSelectSqlStatement statement, ParserContext context, boolean keepConstant) {
mSelect = statement;
mKeepConstant=keepConstant;
vTable=new VTable(SpUtils.generateId(statement.toString(),""));
this.mParserContext=context;
}
public void initWithTable(TSelectSqlStatement statement,ParserContext context,VTable vTable,boolean keepConstant) {
mSelect = statement;
mKeepConstant=keepConstant;
this.vTable=vTable;
this.mParserContext=context;
}
// public SelectParser(TSelectSqlStatement statement,ParserContext context,VTable vTable,boolean keepConstant) {
// mSelect = statement;
// mKeepConstant=keepConstant;
// this.vTable=vTable;
// this.mParserContext=context;
// }
public void parse() {
//if the relations for this table already exist, skip re-parsing
if(isTableExisted(vTable)){
return ;
}
//WHERE clause
TWhereClause where = mSelect.getWhereClause();
if(where != null) {
// System.out.println(where);
}
//TODO convert CTEs into table relations
mParserContext.setCurrentTable(vTable);
//join items: everything after FROM is treated as a join
TJoinList joinList = mSelect.joins;
if (joinList != null) {
for (int i = 0; i < joinList.size(); i++) {
TJoin join = joinList.getJoin(i);
TTable table=join.getTable();
vTable.addTable(analyseTable(table));
TJoinItemList joinItemList = join.getJoinItems();
if (joinItemList != null) {
for (int j = 0; j < joinItemList.size(); j++) {
TJoinItem joinItem = joinItemList.getJoinItem(j);
TTable joinTable = joinItem.getTable();
vTable.addTable(analyseTable(joinTable));
//TExpression expr = joinItem.getOnCondition();
//TODO analyse the ON-clause columns; a column from another table influences this one
}
}
}
} else {
log.error("no FROM table found");
return;
}
mParserContext.setCurrentTable(vTable);
// mSelect.getSetOperatorType()
//handle UNION, UNION ALL, EXCEPT, etc.
if(mSelect.getSetOperator()!=TSelectSqlStatement.setOperator_none){
VTable currentTable = mParserContext.getCurrentTable();//stash
SelectParser sp1 = new SelectParser(mSelect.getLeftStmt(), mParserContext);
sp1.parse();
SelectParser sp2 = new SelectParser(mSelect.getRightStmt(), mParserContext);
sp2.parse();
currentTable.addTable(sp1.vTable);
currentTable.addTable(sp2.vTable);
mParserContext.setCurrentTable(currentTable);//restore
currentTable.addColumns(sp1.getParseResult());
currentTable.addColumns(sp2.getParseResult());
}else {
TResultColumnList resultColumnList = mSelect.getResultColumnList();
for (int i = 0; i < resultColumnList.size(); i++) {
TResultColumn column = resultColumnList.getResultColumn(i);
TExpression expr = column.getExpr();
ExprToColumn etc = applicationContext.getBean(ExprToColumn.class);
etc.setMStmt(mSelect);
etc.setMParserContext(mParserContext);
KColumn rc = etc.exprVisit(expr);
// KColumn rc = new ExprToColumn(mSelect, mParserContext, mKeepConstant).exprVisit(expr);
//grc record-level flag
// rc.setEtlFlag("12");
mSourceColumn.add(rc);
}
}
//INTO table: its columns depend on the aliases of the result columns; build the dependencies from the result above
/*
SELECT *
INTO new_table_name [IN externaldatabase]
FROM old_tablename
*/
//TODO SELECT INTO amounts to creating a new table
TIntoClause intoClause = mSelect.getIntoClause();
if (intoClause != null) {
TExpressionList exprList = intoClause.getExprList();
List<KColumn> newColumns = new ArrayList<KColumn>(mSourceColumn.size());
for (int i = 0; i < exprList.size(); i++) {
TExpression expr = exprList.getExpression(i);
String tableName = null;
switch (expr.getExpressionType()) {
case in_t:
tableName = SpUtils.removeQuote(expr.getLeftOperand().toString());
//process the table name; it may include the database
break;
case simple_object_name_t:
//carry over the relations of the select statement
List<VTable> fromTables=vTable.getFromTables();
String[] nameInfo= SpUtils.analyseTableName(expr.toString());
vTable=new VTable(nameInfo[2],nameInfo[2]);
vTable.db=mParserContext.getDefaultDb();
vTable.schema=mParserContext.getDefaultSchema();
vTable.getFromTables().addAll(fromTables);
break;
}
for (KColumn col : mSourceColumn) {
if(col==null)continue;
KColumn nc = new KColumn();
nc.columnPrefix = tableName;
nc.columnName = col.alias == null ? col.columnName : col.alias;
nc.alias = null;
if(col.vColumn){
nc.getRefColumns().addAll(col.getRefColumns());
}else {
nc.addRefColumn(col);
}
newColumns.add(nc);
}
}
//TODO the result is recast as this table's columns; provisional handling, to be reconsidered
this.mSourceColumn=newColumns;
}
vTable.addColumns(mSourceColumn);
mParserContext.addVTable( vTable);
}
private boolean isTableExisted(VTable vTable) {
//stub: relation caching is not implemented yet, so parsing always proceeds
return false;
}
private VTable analyseTable(TTable table){
String fullName=table.getFullName();
String tableAlias=table.getAliasName();
VTable vTable=null;
if(StringUtils.isBlank(fullName)){//subquery: fullName is blank
vTable=new VTable(SpUtils.generateId(table.toString(),tableAlias));
}else{//plain table name
String[] nameInfo=SpUtils.analyseTableName(fullName);
vTable=new VTable(nameInfo[2],nameInfo[2]);
vTable.db=mParserContext.getDefaultDb();
vTable.schema=mParserContext.getDefaultSchema();
}
if(StringUtils.isNotBlank(tableAlias)){
vTable.setAlias(tableAlias);
}
//a table we have already seen or created
VTable createdTable = mParserContext.findExistedTable(vTable.getFullName(),true);
if(createdTable!=null) {
createdTable.setAlias(vTable.alias);
return createdTable;
}
switch (table.getTableType()) {
case objectname:
//a real database table; look up the owning system (there may be several)
KDatabaseProvider kDatabaseProvider = applicationContext.getBean(KDatabaseProvider.class);
kDatabaseProvider.getDatabase(vTable, mParserContext);
break;
case subquery:
TSelectSqlStatement subQuery = table.getSubquery();
// VTable currentTable=mParserContext.getCurrentTable();
SelectParser sp = applicationContext.getBean(SelectParser.class);
sp.initWithTable(subQuery,mParserContext,vTable,mKeepConstant);
sp.parse();
// mParserContext.setCurrentTable(vTable);
//the dependency wiring below duplicated work done elsewhere, hence commented out
// vTable.addColumns(sp.getParseResult());
// vTable.getFromTables().addAll(sp.vTable.getFromTables());
break; //subquery
case function:
// table.getFuncCall();
break;
case tableExpr:break;
case rowList:break;
//SQL Server only
case containsTable:break;
case freetextTable:break;
default:break;
}
mParserContext.addVTable(vTable);
return vTable;
}
public List<KColumn> getParseResult() {
List<KColumn> ret=new ArrayList<KColumn>();
for(KColumn column: vTable.getColumns()){
if(column==null){
ret.add(column);
}else {
if (column.isStar) {
ret.addAll(column.getRefColumns());
} else {
ret.add(column);
}
}
}
return ret;
}
}

179
src/main/java/com/guozhi/bloodanalysis/parser/common/UpdateParser.java

@ -0,0 +1,179 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.common;
import java.util.ArrayList;
import java.util.List;
import com.guozhi.bloodanalysis.parser.utils.ExprToColumn;
import com.guozhi.bloodanalysis.parser.utils.KColumnProvider;
import com.guozhi.bloodanalysis.parser.utils.SpUtils;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import gudusoft.gsqlparser.nodes.TExpression;
import gudusoft.gsqlparser.nodes.TJoinList;
import gudusoft.gsqlparser.nodes.TResultColumn;
import gudusoft.gsqlparser.nodes.TResultColumnList;
import gudusoft.gsqlparser.nodes.TTable;
import gudusoft.gsqlparser.nodes.TTableList;
import gudusoft.gsqlparser.nodes.TWhereClause;
import gudusoft.gsqlparser.stmt.TSelectSqlStatement;
import gudusoft.gsqlparser.stmt.TUpdateSqlStatement;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
@Component
@NoArgsConstructor
public class UpdateParser{
private TUpdateSqlStatement mUpdateStmt;
private List<KColumn> mColumns;
private ParserContext mParserContext;
@Autowired
ApplicationContext applicationContext;
public UpdateParser(TUpdateSqlStatement stmt, ParserContext context) {
mUpdateStmt = stmt;
mColumns = new ArrayList<KColumn>();
this.mParserContext = context;
}
public void parse() {
TTable targetTable = mUpdateStmt.getTargetTable();
VTable updateTable = new VTable(targetTable.getName(), targetTable.getName());
updateTable.db = mParserContext.getDefaultDb();
updateTable.schema = mParserContext.getDefaultSchema();
boolean exportColumn = false;
VTable createdTable = mParserContext.findExistedTable(updateTable.getFullName(),true);
if (createdTable == null) {
exportColumn = true;
}
mParserContext.setCurrentTable(updateTable);
TTableList tableList = mUpdateStmt.tables;
if (tableList != null) {
for (int i = 0; i < tableList.size(); i++) {
TTable fromTable = tableList.getTable(i);
VTable table=analyseTable(fromTable);
if(table.getFullName().equals(updateTable.getFullName()))continue;
mParserContext.getTableInCurrentStatement().add(table);
}
}
TJoinList joinList=mUpdateStmt.getReferenceJoins();
if(joinList!=null){
for(int i=0;i<joinList.size();i++){
System.out.println(joinList.getJoin(i));
}
}
TWhereClause whereClause = mUpdateStmt.getWhereClause();
if(whereClause!=null) {
TExpression conditionExpr = whereClause.getCondition();
}
// List<KColumn> conditionColumns = new ArrayList<KColumn>();
// conditionColumns.add(new ExprToColumn(mUpdateStmt, mParserContext, false).exprVisit(conditionExpr));
TResultColumnList rcList = mUpdateStmt.getResultColumnList();
for (int i = 0; i < rcList.size(); i++) {
TResultColumn rc = rcList.getResultColumn(i);
TExpression expr = rc.getExpr();
KColumn dest= new KColumn();
dest.db=updateTable.db;
dest.schema=updateTable.schema;
dest.tableName=updateTable.getName();
dest.columnName=expr.getLeftOperand().toString();
//temporary or persistent table
dest.isEvTable = updateTable.isCreated();
dest.export=exportColumn;
KColumn src= new ExprToColumn(mUpdateStmt, mParserContext, false).exprVisit(expr.getRightOperand()) ;
dest.addRefColumn(src);
mColumns.add(dest);
}
//dependencies are only computed between different tables
/* for (KColumn column : conditionColumns) {
//TODO the check on whether two columns belong to the same table needs strengthening
System.out.println(column.tableName);
if (column != null && !column.tableName.equals(targetTable.getName())) {
for (KColumn mColumn : mColumns) {
mColumn.addRefColumn(column);
}
}
}*/
}
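// Illustrative example (hypothetical SQL):
//   UPDATE T SET A = S.X WHERE T.ID = S.ID;
//   => the dest column T.A records a reference to the source column S.X via ExprToColumn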
public List<KColumn> getParseResult() {
return mColumns;
}
private VTable analyseTable(TTable table) {
String fullName = table.getFullName();
String tableAlias = table.getAliasName();
VTable vTable = null;
if (StringUtils.isBlank(fullName)) {//subquery
vTable = new VTable(SpUtils.generateId(table.toString(), tableAlias));
} else if (StringUtils.isNotBlank(fullName)) {//plain table name
String[] nameInfo = SpUtils.analyseTableName(fullName);
vTable = new VTable(nameInfo[2], nameInfo[2]);
vTable.db = mParserContext.getDefaultDb();
vTable.schema = mParserContext.getDefaultSchema();
}
if (StringUtils.isNotBlank(tableAlias)) {
vTable.setAlias(tableAlias);
}
//a table we have already seen or created
VTable createdTable = mParserContext.findExistedTable(vTable.getFullName(),true);
if (createdTable != null) {
createdTable.setAlias(vTable.alias);
return createdTable;
}
switch (table.getTableType()) {
case objectname:
vTable.setRealTable(true);
//a real database table; look up its column relations
vTable.addColumns(new KColumnProvider().getColumns(null,table.getFullName(), mParserContext));
break;
case subquery:
TSelectSqlStatement subQuery = table.getSubquery();
VTable currentTable = mParserContext.getCurrentTable();
SelectParser sp = applicationContext.getBean(SelectParser.class);
sp.initWithTable(subQuery, mParserContext, vTable, true);
sp.parse();
mParserContext.setCurrentTable(currentTable);
//build the dependency relations
vTable.addColumns(sp.getParseResult());
break; //subquery
case function:
//table.getFuncCall();
break;
case tableExpr:
break;
case rowList:
break;
//SQL Server only
case containsTable:
break;
case freetextTable:
break;
default:
break;
}
mParserContext.addVTable(vTable);
return vTable;
}
}

189
src/main/java/com/guozhi/bloodanalysis/parser/utils/ColumnRefFinder.java

@ -0,0 +1,189 @@
package com.guozhi.bloodanalysis.parser.utils;
import com.guozhi.bloodanalysis.entity.MetaColumn;
import com.guozhi.bloodanalysis.mapper.MetaBloodAnalysisMapper;
import com.guozhi.bloodanalysis.parser.common.ParserContext;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* Created by Walk.Lai on 2015/8/8.
*/
@Slf4j
@Component
@Scope("prototype")
public class ColumnRefFinder {
@Autowired
MetaBloodAnalysisMapper metaBloodAnalysisMapper;
public void find(KColumn column, ParserContext parserContext) {
VTable currentTable = parserContext.getCurrentTable();
if(currentTable==null){
return;
}
List<VTable> fromTables = currentTable.getFromTables();
VTable foundTable = findRelatedTable(column, fromTables,parserContext);
if (foundTable == null) {// still not found in the FROM list
foundTable = findRelatedTable(column,parserContext.getTableInCurrentStatement(),parserContext);
if (foundTable == null) {
log.error("Cannot find the table that the column belongs to: " + column.toString());
return;
}
}
if (foundTable.isRealTable()) {
column.db = foundTable.db;
column.schema = foundTable.schema;
column.tableName = foundTable.getName();
column.columnPrefix = foundTable.getFullName();
// found the table; check whether it is a temp table created earlier in this script
VTable table = parserContext.getCreatedTables().get(foundTable.getFullName());
if (table == null) {
column.export = true;
// it is already a physical table; linking it directly is enough
if (column.isStar) {
// TODO: fetch the table's columns from the metadata service
if (foundTable.getColumns().size() > 0) {
column.addRefColumnList(foundTable.getColumns());
} else {
column.addRefColumnList(new KColumnProvider().getColumns(foundTable,null,parserContext));
}
}
} else {
column.columnPrefix = foundTable.getFullName();
if (column.isStar) {
column.addRefColumnList(table.getColumns());
} else {
for (KColumn c : foundTable.getColumns()) {
if (c == null) {
continue;
}
String name = (c.alias == null ? c.columnName : c.alias);
if (name.equals(column.columnName)) {
// reassigning the parameter would be lost at the caller; link the matching column instead
addRefCol(c, column);
break;
}
}
}
}
return;
}
if (column.isStar) {
if (foundTable.getColumns().size() > 0) {
column.addRefColumnList(foundTable.getColumns());
} else {
// TODO: fetch the table definition and its columns from metadata
List<KColumn> columns = new KColumnProvider().getColumns(null,column.columnPrefix, parserContext);
column.addRefColumnList(columns);
}
} else {
for (KColumn s : foundTable.getColumns()) {
if (s == null) {
continue;
}
String name = s.alias == null ? s.columnName : s.alias;
if (name != null && name.equals(column.columnName)) {
addRefCol(s, column);
}
}
// iterFromTableColumn(column, foundTable);
}
}
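// Flattens virtual or under-qualified columns: walks refColumns recursively
// until a fully qualified physical column is found, then copies it onto the target.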
private void addRefCol(KColumn s, KColumn column) {
KColumn t = new KColumn();
if (s.vColumn) {
List<KColumn> list = s.getRefColumns();
for (KColumn _s : list) {
addRefCol(_s, column);
}
} else {
if (s.db == null || s.tableName == null || s.schema == null) {
List<KColumn> list = s.getRefColumns();
for (KColumn _s : list) {
addRefCol(_s, column);
}
} else {
t.alias = s.alias;
t.columnName = s.columnName;
t.columnPrefix = s.columnPrefix;
t.db = s.db;
t.export = s.export;
t.isEvTable = s.isEvTable;
t.isStar = s.isStar;
t.schema = s.schema;
t.tableName = s.tableName;
t.vColumn = s.vColumn;
// under UNION and similar constructs, one column can reference same-named columns from several tables
column.addRefColumn(t);
}
}
}
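// Resolves which FROM table a column belongs to: a single candidate wins outright;
// with a prefix, match it against the table name/alias/full name; without one,
// probe the metadata database for a table that actually contains the column.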
private VTable findRelatedTable(KColumn column, List<VTable> fromTables, ParserContext parserContext) {
if (fromTables.size() == 1) {
return fromTables.get(0);
}
if (column.columnPrefix != null) {// the owning table is explicit and must be found
// TODO: how to decide which table a column prefix denotes?
for (VTable t : fromTables) {
// a column prefix is usually the table name or alias; it rarely carries the database part
if (column.columnPrefix.equals(t.getAlias())
|| column.columnPrefix.equals(t.getName())
|| column.columnPrefix.equals(t.getFullName())) {
return t;
}
}
} else {
VTable t = new VTable("V" + System.currentTimeMillis());
// 没有前缀,无法确认是哪个表的字段,需要查找数据确认字段
for (VTable vt : fromTables) {
if(vt.isRealTable()){
MetaColumn col = metaBloodAnalysisMapper.isColExis(vt.db,vt.schema, vt.name, column.columnName);
if(col != null){
t = vt;
break;
}
}else{
if(getRealTable(vt, column.columnName)){
t = vt;
break;
}
}
}
return t;
}
return null;
}
private boolean getRealTable(VTable t,String colName){
boolean isExis = false;
List<VTable> list = t.getFromTables();
for (VTable vt : list) {
if(vt.isRealTable()){
MetaColumn metaColumn = metaBloodAnalysisMapper.isColExis(vt.db,vt.schema, vt.name, colName);
if (metaColumn != null) {
isExis = true;
break;
}
}else{
// recurse into nested virtual tables and honour the result
if(getRealTable(vt,colName)){
isExis = true;
break;
}
}
}
return isExis;
}
}

29
src/main/java/com/guozhi/bloodanalysis/parser/utils/Constants.java

@ -0,0 +1,29 @@
package com.guozhi.bloodanalysis.parser.utils;
public class Constants {
public static String kvsImplClass = "com.pactera.edg.am.etlmapping.perlparser.perlclean.common.GenericKeyVarSimulator";
public static String sfpImplClass = "com.pactera.edg.am.etlmapping.perlparser.perlclean.common.GenericSqlFragmentParser";
public static String lgsImplClass = "com.pactera.edg.am.etlmapping.perlparser.perlclean.common.GenericLogGenSimulator";
public static String lnImplClass = "com.pactera.edg.am.etlmapping.perlparser.perlclean.common.GenericLogNormalizer";
public static String dcfppImplClass = "com.pactera.edg.am.etlmapping.perlparser.perlclean.common.GenericDbCfgFilePathParser";
/**
* Marker for the execution body of GreenPlum-style scripts
*/
public static String GP_RUN_COMMAND_STR = "SUB RUN_PSQL_COMMAND";
/**
* Marker for the execution body of Oracle-style scripts
*/
public static String ORA_RUN_COMMAND_STR = "SUB RUN_SQLPLUS_COMMAND";
/**
* Marker for the execution body of Teradata-style scripts
*/
public static String TRD_RUN_COMMAND_STR = "SUB RUN_BTEQ_COMMAND";
// suffix of the files that need parsing
public final static String FILE_SUFFIX = ".SQL";//PL
//script status enum: valid, non-standard, faulty, not a Perl script, invalid
public static enum fileSts {
VALID,NOTSTANDARD,FAULT,NOTPERL,INVALID
}
}

133
src/main/java/com/guozhi/bloodanalysis/parser/utils/CryptUtils.java

@ -0,0 +1,133 @@
package com.guozhi.bloodanalysis.parser.utils;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.DESKeySpec;
import java.util.Base64;
public class CryptUtils {
/**
* @param args
*/
public static void main(String[] args) {
String value = "hello12315677中国";
String ss = getInstance().encrypt(value);
String end = getInstance().decrypt(ss);
System.out.println("Encrypted: " + ss);
System.out.println("Decrypted: " + end);
}
private final static String encoding = "UTF-8";
static byte[] keyData = { 1, 9, 8, 2, 0, 8, 2, 1 };
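// NOTE: hardcoded 8-byte key with single DES; weak by modern standards, kept
// only for compatibility with values encrypted by earlier versions.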
static CryptUtils utils = null;
public static CryptUtils getInstance(){
if(utils == null){
utils = new CryptUtils();
}
return utils;
}
/**
* Encrypt
*/
public String encrypt(String str) {
String result = str;
if (str != null && str.length() > 0) {
try {
byte[] encodeByte = symmetricEncrypto(str.getBytes(encoding));
result = Base64.getEncoder().encodeToString(encodeByte);
} catch (Exception e) {
e.printStackTrace();
}
}
return result;
}
/**
* Decrypt
*/
public String decrypt(String str) {
String result = str;
if (str != null && str.length() > 0) {
try {
byte[] encodeByte = Base64.getDecoder().decode(str);
byte[] decoder = symmetricDecrypto(encodeByte);
result = new String(decoder, encoding);
} catch (Exception e) {
// not decryptable; return the input unchanged
return str;
}
}
return result;
}
/**
* Symmetric encryption
*
* @param byteSource data to encrypt
* @return encrypted data
* @throws Exception
*/
private byte[] symmetricEncrypto(byte[] byteSource) throws Exception {
Key key = createDESSecretKey();
Cipher cipher = Cipher.getInstance("DES");
cipher.init(Cipher.ENCRYPT_MODE, key);
return cipher.doFinal(byteSource);
}
/**
* Symmetric decryption
*
* @param byteSource data to decrypt
* @return decrypted data
* @throws Exception
*/
private byte[] symmetricDecrypto(byte[] byteSource) throws Exception {
Key key = createDESSecretKey();
Cipher cipher = Cipher.getInstance("DES");
cipher.init(Cipher.DECRYPT_MODE, key);
return cipher.doFinal(byteSource);
}
SecretKey createDESSecretKey()
throws NoSuchAlgorithmException, InvalidKeyException,
InvalidKeySpecException {
SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("DES");
DESKeySpec keySpec = new DESKeySpec(keyData);
SecretKey key = keyFactory.generateSecret(keySpec);
return key;
}
}

5
src/main/java/com/guozhi/bloodanalysis/parser/utils/DatabaseType.java

@ -0,0 +1,5 @@
package com.guozhi.bloodanalysis.parser.utils;
public enum DatabaseType {
Oracle, Mysql, Teradata, SqlServer, GreenPlum, DB2, TDH, HIVE
}

14
src/main/java/com/guozhi/bloodanalysis/parser/utils/ErrorRecorder.java

@ -0,0 +1,14 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.utils;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ErrorRecorder {
public static void logError(String error){
log.error(error);
}
}

125
src/main/java/com/guozhi/bloodanalysis/parser/utils/ExportParseResultUtil.java

@ -0,0 +1,125 @@
package com.guozhi.bloodanalysis.parser.utils;
import java.util.*;
import com.guozhi.bloodanalysis.entity.DataLineageInfo;
import com.guozhi.bloodanalysis.entity.MetaBloodAnalysis;
import com.guozhi.bloodanalysis.mapper.MetaBloodAnalysisMapper;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class ExportParseResultUtil {
@Autowired
private MetaBloodAnalysisMapper metaBloodAnalysisMapper;
/**
* Export lineage rows built from the in-memory parse result
* @throws Exception
*/
public void expResult(List<KColumn> kColumns, DataLineageInfo dataLineageInfo) throws Exception{
log.info("正在导出解析结果");
List<String> result = new ArrayList<>();
for (KColumn tCol : kColumns) {
getOutList(kColumns, tCol, tCol.getRefColumns(), result);
}
outTable(result,dataLineageInfo);
log.info("解析结果导出完成");
}
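// Each exported line is an 8-field CSV record:
// srcDb,srcSchema,srcTable,srcColumn,tgtDb,tgtSchema,tgtTable,tgtColumn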
private void outTable(List<String> result, DataLineageInfo dataLineageInfo){
for (String line : result) {
String[] lines = line.split(",", -1); // -1 keeps trailing empty fields so lines[7] always exists
MetaBloodAnalysis metaBloodAnalysis = new MetaBloodAnalysis();
metaBloodAnalysis.setId(UUID.randomUUID().toString().replace("-",""));
metaBloodAnalysis.setProId(dataLineageInfo.getOnum());
metaBloodAnalysis.setProName(dataLineageInfo.getProcName());
metaBloodAnalysis.setSourceSysCd(lines[0]);
metaBloodAnalysis.setSourceMdlName(lines[1]);
metaBloodAnalysis.setSourceTableName(lines[2]);
metaBloodAnalysis.setSourceColName(lines[3]);
metaBloodAnalysis.setTargetSysCd(lines[4]);
metaBloodAnalysis.setTargetMdlName(lines[5]);
String[] arr = lines[6].split("\\.");
String targetTableName = arr[arr.length-1];
metaBloodAnalysis.setTargetTableName(targetTableName);
metaBloodAnalysis.setTargetColName(lines[7]);
metaBloodAnalysisMapper.insert(metaBloodAnalysis);
}
}
private void getOutList(List<KColumn> kColumns,KColumn tCol,List<KColumn> sCols,List<String> list){
if(tCol.vColumn){
for (KColumn sCol : sCols) {
getOutList(kColumns, sCol, sCol.getRefColumns(),list);
}
}else{
for (KColumn sCol : sCols) {
if(sCol.vColumn){
getOutList(kColumns,tCol, sCol.getRefColumns(),list);
continue;
}
//only keep source columns that live in a physical table
if(sCol.isEvTable){
//the target table is itself a physical table
if(tCol.isEvTable){
addLineStr(tCol, sCol, list);
}else{
String tKey = tCol.db+"."+tCol.schema+"."+tCol.tableName+"."+tCol.columnName+"."+tCol.isEvTable;
findTargetTab(kColumns, tKey, sCol, list);
}
}
if(sCol.getRefColumns()!=null&&sCol.getRefColumns().size()>0){
getOutList(kColumns,sCol,sCol.getRefColumns(), list);
}
}
}
}
private void findTargetTab(List<KColumn> kColumns,String tKey,KColumn sCol,List<String> list){
for (KColumn tCol : kColumns) {
List<KColumn> sCols = tCol.getRefColumns();
for (KColumn t_sCol : sCols) {
String sKey = t_sCol.db+"."+t_sCol.schema+"."+t_sCol.tableName+"."+t_sCol.columnName+"."+t_sCol.isEvTable;
//if found among the sources, check whether the current target is a real table
if(tKey.equals(sKey)){
if(tCol.isEvTable){
addLineStr(tCol, sCol, list);
}else{
String t_tKey = tCol.db+"."+tCol.schema+"."+tCol.tableName+"."+tCol.columnName+"."+tCol.isEvTable;
if(!t_tKey.equals(tKey)){
findTargetTab(kColumns, t_tKey, sCol, list);
}
}
}
}
}
}
private void addLineStr(KColumn tCol,KColumn sCol,List<String> list){
StringBuffer sb = new StringBuffer();
sb.append(StringUtils.defaultString(sCol.db,""));
sb.append(",");
sb.append(StringUtils.defaultString(sCol.schema,""));
sb.append(",");
sb.append(StringUtils.defaultString(sCol.tableName,""));
sb.append(",");
sb.append(StringUtils.defaultString(sCol.columnName,""));
sb.append(",");
sb.append(StringUtils.defaultString(tCol.db,""));
sb.append(",");
sb.append(StringUtils.defaultString(tCol.schema,""));
sb.append(",");
sb.append(StringUtils.defaultString(tCol.tableName,""));
sb.append(",");
sb.append(StringUtils.defaultString(tCol.columnName,""));
// sb.append(System.getProperty("line.separator"));//jdk1.7以下
// sb.append(System.lineSeparator());
list.add(sb.toString());
}
}

421
src/main/java/com/guozhi/bloodanalysis/parser/utils/ExprToColumn.java

@ -0,0 +1,421 @@
package com.guozhi.bloodanalysis.parser.utils;
import java.awt.Point;
import com.guozhi.bloodanalysis.parser.common.ParserContext;
import com.guozhi.bloodanalysis.parser.common.SelectParser;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import gudusoft.gsqlparser.TCustomSqlStatement;
import gudusoft.gsqlparser.nodes.TAnalyticFunction;
import gudusoft.gsqlparser.nodes.TCaseExpression;
import gudusoft.gsqlparser.nodes.TExpression;
import gudusoft.gsqlparser.nodes.TExpressionList;
import gudusoft.gsqlparser.nodes.TFunctionCall;
import gudusoft.gsqlparser.nodes.TOrderByItem;
import gudusoft.gsqlparser.nodes.TOrderByItemList;
import gudusoft.gsqlparser.nodes.TParseTreeNode;
import gudusoft.gsqlparser.nodes.TParseTreeNodeList;
import gudusoft.gsqlparser.nodes.TResultColumn;
import gudusoft.gsqlparser.nodes.TWhenClauseItem;
import gudusoft.gsqlparser.nodes.TWhenClauseItemList;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
/**
* Created by Walk.Lai on 2015/7/25.
*/
@Slf4j
@Component
@NoArgsConstructor
@Data
public class ExprToColumn {
private TCustomSqlStatement mStmt;
private ParserContext mParserContext;
@Autowired
ApplicationContext applicationContext;
public ExprToColumn(TCustomSqlStatement stmt, ParserContext context, boolean keepConst) {
mStmt = stmt;
mParserContext = context;
// keepConst is unused here; constant handling is decided per call via exprVisit(node, keepConstant)
}
/**
* Get the alias of a column
* @param attr
* @return
*/
private String getColumnAlias(TExpression attr){
TParseTreeNodeList tokens = attr.getStartToken()
.getNodesStartFromThisToken();
if (tokens != null) {
for (int i = 0; i < tokens.size(); i++) {
TParseTreeNode node = tokens.getElement(i);
if (node instanceof TResultColumn) {
TResultColumn field = (TResultColumn) node;
if (field.getAliasClause() != null) {
return field.getAliasClause().toString();
}
}
}
}
return null;
}
/**
* Convert an expression into column info.
* Normally one expression yields one column, but a "*" may expand to several.
*
* @param attr
* @param stmt
* @return
*/
private KColumn attrToColumn(TExpression attr, TCustomSqlStatement stmt ) {
KColumn column = new KColumn();
//getEndToken() yields the name without its prefix: "t.a as aa" -> a, "t.b bb" -> b
column.columnName = SpUtils.removeQuote(attr.getEndToken().toString());
if(column.columnName.contains("*")){
column.vColumn=true;
column.isStar=true;
}
//TODO: why is this handled this way?
//getExprAlias does not return the alias; it has to be read from the result column as below
String alias=getColumnAlias(attr);
if(alias!=null){
column.alias=alias;
}else{
column.alias=column.columnName;
}
if (attr.toString().indexOf(".") > 0) { //has a prefix
column.columnPrefix = SpUtils.removeQuote(attr.toString().substring(0,
attr.toString().lastIndexOf(".")));
}
new ColumnRefFinder().find(column,mParserContext);
return column;
}
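// Recursively folds an expression tree into a KColumn: plain names resolve via
// ColumnRefFinder, while composite expressions become virtual columns whose
// refColumns carry the lineage of their operands.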
public KColumn exprVisit(TParseTreeNode pNode){
return exprVisit(pNode,true);
}
public KColumn exprVisit(TParseTreeNode pNode,boolean keepConstant) {
if(pNode==null){
return null;
}
TExpression expr = (TExpression) pNode;
KColumn toColumn=new KColumn();
switch (expr.getExpressionType()) {
case simple_object_name_t: //a bare "select *" also falls into this case
return attrToColumn(expr, mStmt);
case simple_constant_t:
//TODO: decide from configuration whether a constant should count as a column
toColumn.columnName=expr.toString();
toColumn.vColumn=true;
toColumn.export=keepConstant;
ColumnRefFinder columnRefFinder = applicationContext.getBean(ColumnRefFinder.class);
columnRefFinder.find(toColumn,mParserContext);
return toColumn;
case subquery_t:
VTable currentTable=mParserContext.getCurrentTable();//stash the current table
SelectParser sp = applicationContext.getBean(SelectParser.class);
sp.setMSelect(expr.getSubQuery());
sp.setVTable(new VTable(SpUtils.generateId(expr.getSubQuery().toString(),"")));
sp.setMParserContext(mParserContext);
sp.parse();
mParserContext.setCurrentTable(currentTable);//restore
//TODO: alias?
String alias=getColumnAlias(expr);
if(alias!=null){
toColumn.alias=alias;
toColumn.columnName=toColumn.alias;
}else{
Point location = new Point((int) expr.getEndToken().lineNo,
(int) expr.getEndToken().columnNo);
String columnName=SpUtils.generateId(mStmt.toString(), location.toString());
toColumn.alias=columnName;
toColumn.columnName=toColumn.alias;
}
toColumn.addRefColumnList(sp.getParseResult());
return toColumn;
case case_t:
return caseSide(expr);
case logical_and_t:
case logical_or_t:
case concatenate_t: //a||b
return analyseBothSide(expr);
case logical_not_t:
return analyseLogical_not_tSide(expr);
case null_t:
return analyseNull_TSide(expr);
case simple_comparison_t: //the a=1 in "where a=1"
case in_t:
case arithmetic_plus_t:
case arithmetic_minus_t:
case arithmetic_times_t:
case arithmetic_divide_t:
return analyseBothSide(expr);
case typecast_t:
case unknown_t:
case pattern_matching_t:
case unary_minus_t:
break;
case parenthesis_t:
return analyseNull_TSide(expr);
case function_t:
return functionAnalyse(expr);
case assignment_t://the a=1 in "set a=1"
return analyseBothSide(expr);
case simple_source_token_t: // select null
return null;
case arithmetic_modulo_t:
return modSide(expr);
case list_t:
return listSide(expr);
case between_t:
return betweenSide(expr);
default:
log.info("## NOTE -- unhandled expression type: "+expr.getExpressionType());
return null;
}
return null;
}
private KColumn betweenSide(TExpression expr){
KColumn column = new KColumn();
String alias = getColumnAlias(expr);
column.vColumn = true;
column.columnName = SpUtils.generateId(expr.toString(),"");
if (alias!= null) {
column.alias = alias;
}else{
column.alias = column.columnName;
}
column.addRefColumn(exprVisit(expr.getLeftOperand()));
column.addRefColumn(exprVisit(expr.getRightOperand()));
return column;
}
private KColumn listSide(TExpression expr){
KColumn column = new KColumn();
String alias = getColumnAlias(expr);
column.vColumn = true;
column.columnName = SpUtils.generateId(expr.toString(),"");
if (alias!= null) {
column.alias = alias;
}else{
column.alias = column.columnName;
}
TExpressionList expList = expr.getExprList();
for (int i = 0; i < expList.size(); i++) {
TExpression exp = expList.getExpression(i);
column.addRefColumn(this.exprVisit(exp));
}
return column;
}
private KColumn modSide(TExpression expr){
KColumn column = new KColumn();
String alias = getColumnAlias(expr);
column.vColumn = true;
column.columnName = SpUtils.generateId(expr.toString(),"");
if (alias!= null) {
column.alias = alias;
}else{
column.alias = column.columnName;
}
column.addRefColumn(analyseLeft(expr));
return column;
}
private KColumn caseSide(TExpression expr){
KColumn column = new KColumn();
column.vColumn = true;
column.columnName = SpUtils.generateId(expr.toString(),"");
String alias=getColumnAlias(expr);
if (alias!= null) {
column.alias = alias;
}
TCaseExpression caseExp = expr.getCaseExpression();
// column.columnName = caseExp.toString().replaceAll("\r\n", "");
//too many possibilities: the concrete CASE WHEN values are ignored for now; refine later if needed
TWhenClauseItemList whenList = caseExp.getWhenClauseItemList();
for (int i = 0; i < whenList.size(); i++) {
TWhenClauseItem item = whenList.getWhenClauseItem(i);
TExpression tExpr = item.getReturn_expr();
TExpression sExpr = item.getComparison_expr();
KColumn tCol = this.exprVisit(tExpr);
column.addRefColumn(tCol);
column.addRefColumn(analyseRight(sExpr));
column.addRefColumn(analyseLeft(sExpr));
}
column.addRefColumn(exprVisit(caseExp.getElse_expr()));
ColumnRefFinder crf = applicationContext.getBean(ColumnRefFinder.class);
crf.find(column,mParserContext);
return column;
}
private KColumn analyseNull_TSide(TExpression expr) {
KColumn column = new KColumn();
column.vColumn = true;
Point location = new Point((int) expr.getEndToken().lineNo,
(int) expr.getEndToken().columnNo);
column.columnName = SpUtils.generateId(mStmt.toString(), location.toString());
String alias=getColumnAlias(expr);
if (alias!= null) {
column.alias = alias;
}else{
column.alias=column.columnName;
}
column.addRefColumn(exprVisit(expr.getLeftOperand()));
return column;
}
private KColumn analyseLogical_not_tSide(TExpression expr) {
KColumn column = new KColumn();
column.vColumn = true;
Point location = new Point((int) expr.getEndToken().lineNo,
(int) expr.getEndToken().columnNo);
column.columnName = SpUtils.generateId(mStmt.toString(), location.toString());
String alias=getColumnAlias(expr);
if (alias!= null) {
column.alias = alias;
}else{
column.alias=column.columnName;
}
column.addRefColumn(exprVisit(expr.getRightOperand()));
return column;
}
private KColumn analyseBothSide(TExpression expr) {
KColumn column = new KColumn();
column.vColumn = true;
Point location = new Point((int) expr.getEndToken().lineNo,
(int) expr.getEndToken().columnNo);
column.columnName = SpUtils.generateId(mStmt.toString(), location.toString());
String alias=getColumnAlias(expr);
if (alias!= null) {
column.alias = alias;
}else{
column.alias=column.columnName;
}
column.addRefColumn(exprVisit(expr.getLeftOperand()));
column.addRefColumn(exprVisit(expr.getRightOperand()));
return column;
}
private KColumn analyseRight(TExpression expr) {
return exprVisit(expr.getRightOperand());
}
private KColumn analyseLeft(TExpression expr) {
return exprVisit(expr.getLeftOperand());
}
private KColumn functionAnalyse(TExpression expr){
KColumn column=new KColumn();
column.vColumn=true;
Point location = new Point((int) expr.getEndToken().lineNo,
(int) expr.getEndToken().columnNo);
String columnName=SpUtils.generateId(mStmt.toString(), location.toString());
column.columnName=columnName;
String alias=getColumnAlias(expr);
if(alias!=null){
column.alias=alias;
}else {
column.alias = columnName;
}
TFunctionCall func = expr.getFunctionCall();
//System.out.println(func.getFunctionName());
switch ((func.getFunctionType())){
case extract_t:
case cast_t:
column.addRefColumn(this.exprVisit(func.getExpr1()));
break;
case trim_t:
column.addRefColumn(this.exprVisit(func.getTrimArgument().getStringExpression()));
break;
case unknown_t:
if (func.getArgs() != null){
for ( int k = 0; k < func.getArgs( ).size( ); k++ ){
TExpression argExpr=func.getArgs( ).getExpression(k);
column.addRefColumn(this.exprVisit(argExpr,false));
}
}else if("DENSE_RANK".equals(func.getFunctionName()+"")||"ROW_NUMBER".equals(func.getFunctionName()+"")){
TAnalyticFunction afun = func.getAnalyticFunction();
//TODO wxl PARTITION BY.. 语句报错
// TExpressionList expressionList = afun.getPartitionBy_ExprList();
// if(expressionList!=null){
// for ( int k = 0; k < expressionList.size( ); k++ ){
// TExpression exp = expressionList.getExpression(k);
// column.addRefColumn(this.exprVisit(exp,false));
column.addRefColumn(null);
// }
// }
}
break;
case rank_t:
TAnalyticFunction analy = func.getAnalyticFunction();
if(analy!=null){
TExpressionList expList = analy.getPartitionBy_ExprList();
if(expList!=null){
for (int i = 0; i < expList.size(); i++) {
TExpression exp = expList.getExpression(i);
column.addRefColumn(this.exprVisit(exp,false));
}
}
}else{
TOrderByItemList odrList = func.getOrderByList();
if(odrList!=null){
for (int i = 0; i < odrList.size(); i++) {
TOrderByItem item = odrList.getOrderByItem(i);
column.addRefColumn(this.exprVisit(item.getSortKey(),false));
}
}
}
break;
case csum_t:
TOrderByItemList odrList = func.getOrderByList();
if(odrList!=null){
for (int i = 0; i < odrList.size(); i++) {
TOrderByItem item = odrList.getOrderByItem(i);
column.addRefColumn(this.exprVisit(item.getSortKey(),false));
}
}
break;
case substring_t:
column.addRefColumn(this.exprVisit(func.getExpr1()));
break;
default:
log.info("##【留】【意】--没有检测到当前函数类型 : "+func);
break;
}
return column;
}
}

76
src/main/java/com/guozhi/bloodanalysis/parser/utils/KColumnProvider.java

@ -0,0 +1,76 @@
package com.guozhi.bloodanalysis.parser.utils;
import com.guozhi.bloodanalysis.mapper.MetaBloodAnalysisMapper;
import com.guozhi.bloodanalysis.parser.common.ParserContext;
import com.guozhi.bloodanalysis.parser.vo.KColumn;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* User: Administrator
* Date: 2015/7/28
* Time: 22:11
*/
@Component
@Scope("prototype")
public class KColumnProvider {
@Autowired
MetaBloodAnalysisMapper metaBloodAnalysisMapper;
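// Column lookup order: tables already parsed in the current statement first,
// then previously registered tables, finally the metadata database.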
public List<KColumn> getColumns(VTable vt , String tableName, ParserContext context) {
if (context != null&&tableName!=null) {
VTable table = context.findTableInCurrentStatement(tableName);
if (table == null) {
table = context.findExistedTable(tableName,false);
}
if (table != null && table.getColumns().size() > 0) {
return table.getColumns();
}else {
String[] arr = tableName.split("\\.");
List<Map<String,String>> colList = metaBloodAnalysisMapper.getColumnsByTable(arr[arr.length-1], context.getDefaultDb(),context.getDefaultSchema());
if(colList.size()>0){
List<KColumn> cols = new ArrayList<KColumn>();
for (Map<String, String> colMap : colList) {
KColumn col = new KColumn();
// col.columnPrefix = vt.alias;
col.columnName = colMap.get("colCode");
col.alias = col.columnName;
col.db = context.getDefaultDb();
col.schema = context.getDefaultSchema();
col.tableName = tableName;
col.isEvTable = true;
cols.add(col);
}
return cols;
}
}
}
if(vt!=null&&vt.tabId!=null){
List<Map<String,String>> colList = metaBloodAnalysisMapper.getColumnsByTabId(vt.tabId, null);
if(colList.size()>0){
List<KColumn> cols = new ArrayList<KColumn>();
for (Map<String, String> colMap : colList) {
KColumn col = new KColumn();
col.columnPrefix = vt.alias;
col.columnName = colMap.get("colCode");
col.alias = col.columnName;
col.db = vt.db;
col.schema = vt.schema;
col.tableName = vt.name;
col.isEvTable = true;
cols.add(col);
}
return cols;
}
}
return null;
}
}

81
src/main/java/com/guozhi/bloodanalysis/parser/utils/KDatabaseProvider.java

@ -0,0 +1,81 @@
package com.guozhi.bloodanalysis.parser.utils;
import com.guozhi.bloodanalysis.mapper.MetaBloodAnalysisMapper;
import com.guozhi.bloodanalysis.parser.common.ParserContext;
import com.guozhi.bloodanalysis.parser.vo.VTable;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* User: Administrator
* Date: 2015/7/28
* Time: 22:11
*/
@Component
@Scope("prototype")
public class KDatabaseProvider {
@Autowired
MetaBloodAnalysisMapper bloodAnalysisMapper;
@Autowired
ApplicationContext applicationContext;
public void getDatabase(VTable vTable, ParserContext context) {
boolean realTable = true;
List<String> schemas = new ArrayList<>();
schemas.add(vTable.schema);
if(schemas.size()>1){
for (String schema : schemas) {
List<Map<String, Object>> sysList = bloodAnalysisMapper.getSystem(schema, vTable.name);
if(sysList.size()>0){
addRealTables(vTable, sysList, schema, context);
realTable = false;
}
}
}else{
List<Map<String, Object>> sysList = bloodAnalysisMapper.getSystem(schemas.get(0), vTable.name);
if(sysList.size()>1){
addRealTables(vTable, sysList, schemas.get(0), context);
realTable = false; // multiple matches: treat as ambiguous, same as the multi-schema path
}else if(sysList.size()>0){
Map<String,Object> sysMap = sysList.get(0);
vTable.db = (String) sysMap.get("SYS_CODE");
vTable.tabId = (String) sysMap.get("TAB_ID");
KColumnProvider kColumnProvider = applicationContext.getBean(KColumnProvider.class);
vTable.addColumns(kColumnProvider.getColumns(vTable, null, context));
}
}
//when several candidates match, the owner cannot be determined unambiguously
vTable.setRealTable(realTable);
}
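// Registers each matching physical table as a source of the ambiguous virtual
// table and aggregates their columns onto it.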
private void addRealTables(VTable vTable , List<Map<String, Object>> sysList,String schema, ParserContext context){
int i = 1;
for (Map<String, Object> map : sysList) {
String sysCode = (String)map.get("SYS_CODE");
String schCode = (String)map.get("SCH_CODE");
String tabCode = (String)map.get("TAB_CODE");
VTable vt = new VTable(tabCode);
vt.db = sysCode;
vt.schema = schCode;
vt.alias = "T_"+schema+"_"+(i++);
Integer tabId = (Integer) map.get("TAB_ID");
vt.tabId = String.valueOf(tabId);
vt.setRealTable(true);
KColumnProvider kColumnProvider = applicationContext.getBean(KColumnProvider.class);
vt.addColumns(kColumnProvider.getColumns(vt, null, context));
vTable.addTable(vt);
vTable.addColumns(vt.getColumns());
}
}
}

49
src/main/java/com/guozhi/bloodanalysis/parser/utils/PartitionTool.java

@ -0,0 +1,49 @@
package com.guozhi.bloodanalysis.parser.utils;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class PartitionTool {
private static Log logger = LogFactory.getLog(PartitionTool.class);
public void truncatePartitionIfExistOrAddPartition(Connection con,Map<String,String> param){
truncatePartitionIfExist(con,param);
addPartitionIfNoExist(con,param);
}
public boolean truncatePartitionIfExist(Connection con,Map<String,String> map) {
String produceSql = "{call droppartition(?,?,?) }";
boolean bool = execute(con,produceSql,map);
return bool;
}
public boolean addPartitionIfNoExist(Connection con,Map<String,String> map) {
String produceSql = "{call addpartition(?,?,?) }";
boolean bool = execute(con,produceSql,map);
return bool;
}
private boolean execute(Connection con,String produceSql,Map<String,String> map){
String scheName = map.get("scheName");
String tableName = map.get("tarTable");
String partitionName = map.get("partitionName");
logger.info("调用"+partitionName+"存储过程,参数为"+"["+scheName+","+tableName+","+partitionName+"]");
try {
CallableStatement call = con.prepareCall(produceSql);
call.setString(1, scheName);
call.setString(2, tableName);
call.setString(3, partitionName);
return call.execute();
} catch (SQLException e) {
logger.error("调用"+partitionName+"存储过程失败,参数为"+"["+scheName+","+tableName+","+partitionName+"]",e);
throw new RuntimeException(e);
}
}
}

123
src/main/java/com/guozhi/bloodanalysis/parser/utils/SpUtils.java

@ -0,0 +1,123 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.utils;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.SimpleDateFormat;
import lombok.extern.slf4j.Slf4j;
import java.util.Base64;
@Slf4j
public class SpUtils {
public static String removeQuote(String string) {
if (string != null && string.indexOf('.') != -1) {
String[] splits = string.split("\\.");
StringBuffer result = new StringBuffer();
for (int i = 0; i < splits.length; i++) {
result.append(removeQuote(splits[i]));
if (i < splits.length - 1) {
result.append(".");
}
}
return result.toString();
}
if (string != null && string.startsWith("\"") && string.endsWith("\""))
return string.substring(1, string.length() - 1);
return string;
}
public static String generateId(String wholeSQL, String idStr) {
//build the digest input
StringBuilder sb = new StringBuilder(wholeSQL).append(idStr);
try {
MessageDigest md5 = MessageDigest.getInstance("MD5");
//Base64 of the MD5 digest serves as the id
return Base64.getEncoder().encodeToString(md5.digest(sb.toString().getBytes("utf-8")));
}catch ( NoSuchAlgorithmException e){
log.error(e.getMessage());
}catch (UnsupportedEncodingException e){
log.error(e.getMessage());
}
return sb.toString();
}
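// Examples: "t" -> [null,null,"t",null]; "s.t" -> ["s","s","t",null]
// (the schema doubles as the db); "d.s.t" -> ["d","s","t",null].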
public static String[] analyseTableName(String name){
name=removeQuote(name);
String[] arr=name.split("\\.",3);
String[] ret=new String[]{null,null,null,null}; //db.schema.table.column
if(arr.length==1){ //table name only
ret[2]=arr[0];
}else if(arr.length==2){//schema.table
ret[0]=arr[0];
ret[1]=arr[0];
ret[2]=arr[1];
}else if(arr.length==3) {//db.schema.table
ret[0]=arr[0];
ret[1]=arr[1];
ret[2]=arr[2];
}
return ret;
}
public static void deleteDir(String filePath){
/*try {
// create nested directories
if(file.isDirectory()){
String[] children = file.list();
//recursively delete subdirectories first
for (int i=0; i<children.length; i++) {
deleteDir(new File(file, children[i]));
}
}
file.delete();
} catch (IOException e) {
throw new Exception("target file not found", e);
}*/
java.util.Date currentTime = new java.util.Date();
SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
String currDateStr = formatter.format(currentTime);
File file = new File(filePath);
String[] tempList = file.list();
File temp = null;
if(tempList!=null){
for (int i = 0; i < tempList.length; i++) {
if(!currDateStr.equals(tempList[i])){
if (filePath.endsWith(File.separator)) {
temp = new File(filePath + tempList[i]);
} else {
temp = new File(filePath + File.separator + tempList[i]);
}
if (temp.isFile()) {
temp.delete();
}
if (temp.isDirectory()) {
deleteDir(filePath + "/" + tempList[i]);// 先删除文件夹里面的文件
temp.delete();
}
}
}
}
}
public static String getNowDate(String format){
java.util.Date currentTime = new java.util.Date();
SimpleDateFormat formatter = new SimpleDateFormat(format);
String currDateStr = formatter.format(currentTime);
return currDateStr;
}
}

9
src/main/java/com/guozhi/bloodanalysis/parser/vo/ClauseType.java

@ -0,0 +1,9 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.vo;
public enum ClauseType {
connectby, groupby, join, orderby, select, startwith, undefine, insert, delete, update, where
}

124
src/main/java/com/guozhi/bloodanalysis/parser/vo/ExcelCell.java

@ -0,0 +1,124 @@
package com.guozhi.bloodanalysis.parser.vo;
/**
* Wrapper for an Excel cell
* @author HeChangZhi
*
*/
public class ExcelCell {
private String value;//cell value
private Integer firstRow;//first row
private Integer lastRow;//last row
private Integer firstCol;//first column
private Integer lastCol;//last column
private String type;
private String mergeDown = "0";
private String mergeAcross = "0";
private String index = "";
private String style = "";
public ExcelCell(String value) {
super();
this.value = value;
}
public ExcelCell(String value, Integer firstRow, Integer lastRow, Integer firstCol, Integer lastCol) {
super();
this.value = value;
this.firstRow = firstRow;
this.lastRow = lastRow;
this.firstCol = firstCol;
this.lastCol = lastCol;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public Integer getFirstRow() {
return firstRow;
}
public void setFirstRow(Integer firstRow) {
this.firstRow = firstRow;
}
public Integer getLastRow() {
return lastRow;
}
public void setLastRow(Integer lastRow) {
this.lastRow = lastRow;
}
public Integer getFirstCol() {
return firstCol;
}
public void setFirstCol(Integer firstCol) {
this.firstCol = firstCol;
}
public Integer getLastCol() {
return lastCol;
}
public void setLastCol(Integer lastCol) {
this.lastCol = lastCol;
}
public String getMergeDown() {
return mergeDown;
}
public void setMergeDown(String mergeDown) {
this.mergeDown = mergeDown;
}
public String getMergeAcross() {
return mergeAcross;
}
public void setMergeAcross(String mergeAcross) {
this.mergeAcross = mergeAcross;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getIndex() {
return index;
}
public void setIndex(String index) {
this.index = index;
}
public String getStyle() {
return style;
}
public void setStyle(String style) {
this.style = style;
}
}

72
src/main/java/com/guozhi/bloodanalysis/parser/vo/ExcelSheet.java

@ -0,0 +1,72 @@
package com.guozhi.bloodanalysis.parser.vo;
import java.util.List;
import java.util.Map;
/**
* Wrapper for an Excel sheet
* @author HeChangZhi
*
*/
public class ExcelSheet {
/*
* sheet name
*/
private String sheetName;
/*
* whether the header cells are merged
*/
private boolean merged;
/*
* headers:
* key is the bean property name,
* value is the header cell
*/
private Map<String, ExcelCell> titles;
/*
* sheet data
*/
//private List<?> datas;
private List<Map<String, Object>> datas;
public ExcelSheet(String sheetName, boolean merged, Map<String, ExcelCell> titles, List<Map<String, Object>> datas) {
super();
this.sheetName = sheetName;
this.merged = merged;
this.titles = titles;
this.datas = datas;
}
public String getSheetName() {
return sheetName;
}
public void setSheetName(String sheetName) {
this.sheetName = sheetName;
}
public boolean isMerged() {
return merged;
}
public void setMerged(boolean merged) {
this.merged = merged;
}
public Map<String, ExcelCell> getTitles() {
return titles;
}
public void setTitles(Map<String, ExcelCell> titles) {
this.titles = titles;
}
public List<Map<String, Object>> getDatas() {
return datas;
}
public void setDatas(List<Map<String, Object>> datas) {
this.datas = datas;
}
}

146
src/main/java/com/guozhi/bloodanalysis/parser/vo/KColumn.java

@ -0,0 +1,146 @@
/*
Copyright 2015 Dell Inc.
ALL RIGHTS RESERVED.
*/
package com.guozhi.bloodanalysis.parser.vo;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class KColumn {
/**
* Database identifier
*/
public String db;
/**
* Schema
*/
public String schema;
/**
* Table name
*/
public String tableName;
/**
* Column prefix
*/
public String columnPrefix;
/**
* Column name (the business key)
*/
public String columnName;
/**
* Column alias
*/
public String alias;//only meaningful within a single statement; irrelevant across contexts
public boolean isEvTable = true;
public boolean vColumn = false;
public boolean isStar=false;
public boolean export=false;
private List<KColumn> refColumns = new ArrayList<KColumn>(); //must be a list to preserve order
private Set<KColumn> whereColumns=new HashSet<KColumn>();
/**
* ETL flag recording the source marker
*/
public String etlFlag;
/**
* GRC ETL flag column name
*/
public final static String etlSrcTabName="ETL_SRC_TBL_NAME";
public KColumn() {
}
public void addRefColumn(KColumn column) {
if(column!=null){
for(KColumn col: refColumns){
if(StringUtils.equals(col.db,column.db)&&StringUtils.equals(col.schema,column.schema)
&&StringUtils.equals(col.tableName,column.tableName)
&&StringUtils.equals(col.columnName,column.columnName)
&& StringUtils.equals(col.columnPrefix,column.columnPrefix)){
//already present; do not add it again
return;
}
}
this.refColumns.add(column);
}
}
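// Unlike addRefColumn above, the bulk variants below do not deduplicate.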
public void addRefColumnList(Collection<KColumn> columns) {
if(columns!=null)
this.refColumns.addAll(columns);
}
public void setRefColumnList(List<KColumn> columns) {
if(columns!=null)
this.refColumns = columns;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(db+"."+schema+"."+tableName + "." + columnName );
if(alias!=null){
sb.append(" as "+alias);
}
if (refColumns.size() > 0) {
sb.append(" <-- "+ refColumns.toString());
}
return sb.toString();
}
public String toCypherString(){
StringBuilder sb=new StringBuilder();
sb.append("{name:\"");
sb.append(columnName);
sb.append("\",db:\"");
sb.append(db);
sb.append("\",schema:\"");
sb.append(schema);
sb.append("\",tableName:\"");
sb.append(tableName);
// sb.append("\",vColumn:\"");
// sb.append(vColumn);
// sb.append("\",isStar:\"");
// sb.append(isStar);
sb.append("\",export:\"");
sb.append(export);
sb.append("\"}");
return sb.toString();
}
public String queryDependsString(){
StringBuilder sb=new StringBuilder();
sb.append("Match (c:Column {name:\"");
sb.append(columnName);
sb.append("\",db:\"");
sb.append(db);
sb.append("\",schema:\"");
sb.append(schema);
sb.append("\",tableName:\"");
sb.append(tableName);
sb.append("\",export:'true'}),\n (c)-[:DEPENDS*1..10]->(c1 {export:'true'}) return c1");
return sb.toString();
}
public List<KColumn> getRefColumns() {
return refColumns;
}
public String getEtlFlag() {
return etlFlag;
}
public void setEtlFlag(String etlFlag) {
this.etlFlag = etlFlag;
}
}

141
src/main/java/com/guozhi/bloodanalysis/parser/vo/VTable.java

@ -0,0 +1,141 @@
package com.guozhi.bloodanalysis.parser.vo;
import java.util.ArrayList;
import java.util.List;
/**
* Created by Walk.Lai on 2015/8/2.
*/
public class VTable {
/**
* Table instance id (instId) in the metadata database
*/
public String tabId;
/**
* Database identifier
*/
public String db;
/**
* Database schema
*/
public String schema;
/**
* Table name
*/
public String name;
/**
* Table alias
*/
public String alias;// only meaningful within a single statement; irrelevant across contexts
/**
* whether this table is depended on (a real table)
*/
private boolean realTable = false;
/**
* physical vs. temporary: false = temp table, true = physical table
*/
private boolean created = true;
/**
* the table's columns
*/
private List<KColumn> columns = new ArrayList<>();
/**
* source tables
*/
private List<VTable> fromTables = new ArrayList<>();
/**
* GRC record-level parse flag
*/
public String etlFlag;
public VTable(String name) {
this.name = name;
}
public VTable(String name, String alias) {
this.name = name;
this.alias = alias;
}
public String getName() {
return name;
}
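// Note: a null db makes the literal "null" part of the full name; table lookups
// rely on this being applied consistently on both sides.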
public String getFullName() {
StringBuilder sb = new StringBuilder();
sb.append(db);
sb.append(".");
if (schema != null) {
sb.append(schema);
sb.append(".");
}
sb.append(name);
return sb.toString();
}
public List<KColumn> getColumns() {
return columns;
}
public List<VTable> getFromTables() {
return fromTables;
}
public void addColumn(KColumn column) {
columns.add(column);
}
public void addColumns(List<KColumn> column) {
if (column != null) {
columns.addAll(column);
}
}
public void emptyColumns() {
columns.clear();
}
public void addTable(VTable table) {
fromTables.add(table);
}
public boolean isRealTable() {
return realTable;
}
public void setRealTable(boolean realTable) {
this.realTable = realTable;
}
public String getAlias() {
return alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public boolean isCreated() {
return created;
}
public void setCreated(boolean created) {
this.created = created;
}
public String toCypherString() {
StringBuilder sb = new StringBuilder();
sb.append("{db:\"");
sb.append(db);
sb.append("\",schema:\"");
sb.append(schema);
sb.append("\",name:\"");
sb.append(name);
sb.append("\",realTable:\"");
sb.append(realTable);
sb.append("\",created:\"");
sb.append(created);
sb.append("\"}");
return sb.toString();
}
}

110
src/main/java/com/guozhi/bloodanalysis/service/BloodAnalysisService.java

@ -0,0 +1,110 @@
package com.guozhi.bloodanalysis.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.github.pagehelper.PageHelper;
import com.guozhi.bloodanalysis.entity.DataLineageInfo;
import com.guozhi.bloodanalysis.mapper.DataLineageInfoMapper;
import com.guozhi.bloodanalysis.mapper.MetaBloodAnalysisMapper;
import com.guozhi.bloodanalysis.parser.SqlParser;
import com.guozhi.bloodanalysis.utils.RedisUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import javax.servlet.http.HttpServletRequest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class BloodAnalysisService {
@Autowired
DataLineageInfoMapper dataLineageInfoMapper;
@Autowired
ApplicationContext applicationContext;
@Value("${blood-analysis.pageNum}")
Integer pageNum;
@Value("${blood-analysis.pageSize}")
Integer pageSize;
@Value("${databaseUrl}")
String databaseUrl;
@Autowired
MetaBloodAnalysisMapper mapper;
@Autowired
RedisUtils redisUtils;
// @Autowired
// HttpServletRequest request;
@Async
public void analysis(String dashUserName,String dashPassword) {
redisUtils.set("startBloodAnalysis",true);
try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
// 1. create the GET request
HttpGet get = new HttpGet(databaseUrl);
// 2. set the request headers (optional)
get.setHeader("Content-Type", "application/json");
get.setHeader("dashUserName",dashUserName);
get.setHeader("dashPassword",dashPassword);
// 3. execute the request
try (CloseableHttpResponse response = httpClient.execute(get)) {
// 4. read the response
HttpEntity entity = response.getEntity();
if (entity != null) {
String responseStr = EntityUtils.toString(entity);
JSONObject obj = JSON.parseObject(responseStr);
JSONObject data = obj.getJSONObject("data");
JSONArray totalList = data.getJSONArray("totalList");
List<Map<String,String>> list = new ArrayList<>();
if (totalList !=null && totalList.size()>0){
for (int i = 0; i < totalList.size(); i++) {
JSONObject database = totalList.getJSONObject(i);
Map<String,String> map = new HashMap<>();
map.put(database.getString("name"),database.getString("type"));
list.add(map);
}
mapper.deleteAllBloodData();
parse(pageNum,pageSize,list);
}
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
redisUtils.set("startBloodAnalysis",false);
}
}
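// Pages through the lineage records; the recursion continues only while a full
// page is returned, so the final short page terminates it.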
public void parse(Integer pageNum, Integer pageSize, List<Map<String,String>> databaseList) throws Exception {
PageHelper.startPage(pageNum,pageSize);
List<DataLineageInfo> list = dataLineageInfoMapper.search();
if (list != null && list.size()>0){
//the database name/type list was obtained from the dolphinscheduler API in analysis()
for (DataLineageInfo dataLineageInfo : list) {
SqlParser sqlParser = applicationContext.getBean(SqlParser.class);
sqlParser.parse(dataLineageInfo,databaseList);
}
}
if (list != null && list.size() == pageSize){
parse(pageNum+1,pageSize,databaseList);
}
}
}

81
src/main/java/com/guozhi/bloodanalysis/utils/ApiResult.java

@ -0,0 +1,81 @@
package com.guozhi.bloodanalysis.utils;
public class ApiResult <T>{
private Integer code; // status code
private String msg; // message
private T data; // payload
/**
* Success with data
* @param data
* @param <T>
* @return
*/
public static <T> ApiResult<T> success(T data){
return new ApiResult<T>(data);
}
/**
* Success without data
* @param <T>
* @return
*/
public static <T> ApiResult<T> success(){
return new ApiResult<T>();
}
/**
* Error with a code and message
* @param msg
* @param <T>
* @return
*/
public static <T> ApiResult<T> error(Integer code,String msg){
return new ApiResult<T>(code,msg);
}
public static <T> ApiResult<T> error(String msg){
return new ApiResult<T>(msg);
}
/**
* Error without a message
* @param <T>
* @return
*/
public static <T> ApiResult<T> error(){
return new ApiResult<T>("error");
}
private ApiResult(T data) {
this.code = 200;
this.msg = "success";
this.data = data;
}
private ApiResult() {
this.code = 200;
this.msg = "success";
}
private ApiResult(String msg) {
this.code = 400;
this.msg = msg;
}
private ApiResult(Integer code,String msg) {
this.code = code;
this.msg = msg;
}
public Integer getCode() {
return code;
}
public String getMsg() {
return msg;
}
public T getData() {
return data;
}
}

15
src/main/java/com/guozhi/bloodanalysis/utils/DateUtils.java

@ -0,0 +1,15 @@
package com.guozhi.bloodanalysis.utils;
import org.springframework.stereotype.Component;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
@Component
public class DateUtils {
public String getFormateByDate(Date date){
DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
return sdf.format(date);
}
}

31
src/main/java/com/guozhi/bloodanalysis/utils/EncryptUtils.java

@ -0,0 +1,31 @@
package com.guozhi.bloodanalysis.utils;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
public class EncryptUtils {
public static String encryptSHA256(String str){
return sha("SHA-256",str);
}
private static String sha(String type,String str){
String result = null;
try {
MessageDigest md = MessageDigest.getInstance(type);
md.update(str.getBytes());
byte[] byteBuffer = md.digest();
StringBuilder strHexString = new StringBuilder();
for (byte b : byteBuffer) {
String hex = Integer.toHexString(0xff & b);
if (hex.length() == 1) {
strHexString.append('0');
}
strHexString.append(hex);
}
result = strHexString.toString();
}catch (NoSuchAlgorithmException e){
e.printStackTrace();
}
return result;
}
}

588
src/main/java/com/guozhi/bloodanalysis/utils/RedisUtils.java

@ -0,0 +1,588 @@
package com.guozhi.bloodanalysis.utils;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import javax.annotation.Resource;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@Component
public final class RedisUtils {
@Resource
private RedisTemplate<String, Object> redisTemplate;
// =============================common============================
/**
* Set the expiry of a cached key
*
* @param key
* @param time time in seconds
*/
public boolean expire(String key, long time) {
try {
if (time > 0) {
redisTemplate.expire(key, time, TimeUnit.SECONDS);
}
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Get the remaining time-to-live of a key
*
* @param key must not be null
* @return time in seconds; 0 means the key never expires
*/
public long getExpire(String key) {
return redisTemplate.getExpire(key, TimeUnit.SECONDS);
}
/**
* Check whether a key exists
*
* @param key
* @return true if it exists, false otherwise
*/
public boolean hasKey(String key) {
try {
return redisTemplate.hasKey(key);
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Delete cached keys
*
* @param key one or more keys
*/
@SuppressWarnings("unchecked")
public void del(String... key) {
if (key != null && key.length > 0) {
if (key.length == 1) {
redisTemplate.delete(key[0]);
} else {
redisTemplate.delete((Collection<String>) CollectionUtils.arrayToList(key));
}
}
}
// ============================String=============================
/**
* Plain cache get
*
* @param key
* @return
*/
public Object get(String key) {
return key == null ? null : redisTemplate.opsForValue().get(key);
}
/**
* Plain cache put
*
* @param key
* @param value
* @return true on success, false on failure
*/
public boolean set(String key, Object value) {
try {
redisTemplate.opsForValue().set(key, value);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Plain cache put with an expiry
*
* @param key
* @param value
* @param time time in seconds; must be > 0, otherwise the key is stored without expiry
* @return true on success, false on failure
*/
public boolean set(String key, Object value, long time) {
try {
if (time > 0) {
redisTemplate.opsForValue().set(key, value, time, TimeUnit.SECONDS);
} else {
set(key, value);
}
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Increment
*
* @param key
* @param delta amount to add (must be greater than 0)
*/
public long incr(String key, long delta) {
if (delta < 0) {
throw new RuntimeException("递增因子必须大于0");
}
return redisTemplate.opsForValue().increment(key, delta);
}
/**
* Decrement
*
* @param key
* @param delta amount to subtract (must be greater than 0)
*/
public long decr(String key, long delta) {
if (delta < 0) {
throw new RuntimeException("递减因子必须大于0");
}
return redisTemplate.opsForValue().increment(key, -delta);
}
// ================================Map=================================
/**
* HashGet
*
* @param key must not be null
* @param item must not be null
*/
public Object hget(String key, String item) {
return redisTemplate.opsForHash().get(key, item);
}
/**
* Get all entries under a hash key
*
* @param key
* @return all key-value pairs
*/
public Map<Object, Object> hmget(String key) {
return redisTemplate.opsForHash().entries(key);
}
/**
* HashSet
*
* @param key
* @param map multiple key-value pairs
*/
public boolean hmset(String key, Map<String, Object> map) {
try {
redisTemplate.opsForHash().putAll(key, map);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* HashSet with an expiry
*
* @param key
* @param map multiple key-value pairs
* @param time time in seconds
* @return true on success, false on failure
*/
public boolean hmset(String key, Map<String, Object> map, long time) {
try {
redisTemplate.opsForHash().putAll(key, map);
if (time > 0) {
expire(key, time);
}
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Put a value into a hash, creating the hash if it does not exist
*
* @param key
* @param item
* @param value
* @return true on success, false on failure
*/
public boolean hset(String key, String item, Object value) {
try {
redisTemplate.opsForHash().put(key, item, value);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Put a value into a hash, creating the hash if it does not exist
*
* @param key
* @param item
* @param value
* @param time time in seconds; note: replaces any expiry already set on the hash
* @return true on success, false on failure
*/
public boolean hset(String key, String item, Object value, long time) {
try {
redisTemplate.opsForHash().put(key, item, value);
if (time > 0) {
expire(key, time);
}
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Delete entries from a hash
*
* @param key must not be null
* @param item one or more items; must not be null
*/
public void hdel(String key, Object... item) {
redisTemplate.opsForHash().delete(key, item);
}
/**
* Check whether a hash contains an item
*
* @param key must not be null
* @param item must not be null
* @return true if present, false otherwise
*/
public boolean hHasKey(String key, String item) {
return redisTemplate.opsForHash().hasKey(key, item);
}
/**
* Hash increment; creates the entry if absent and returns the new value
*
* @param key
* @param item
* @param by amount to add (must be greater than 0)
*/
public double hincr(String key, String item, double by) {
return redisTemplate.opsForHash().increment(key, item, by);
}
/**
* Hash decrement
*
* @param key
* @param item
* @param by amount to subtract (must be greater than 0)
*/
public double hdecr(String key, String item, double by) {
return redisTemplate.opsForHash().increment(key, item, -by);
}
// ============================set=============================
/**
* Get all members of a set
*
* @param key
*/
public Set<Object> sGet(String key) {
try {
return redisTemplate.opsForSet().members(key);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Check whether a value is a member of a set
*
* @param key
* @param value
* @return true if present, false otherwise
*/
public boolean sHasKey(String key, Object value) {
try {
return redisTemplate.opsForSet().isMember(key, value);
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Add values to a set
*
* @param key
* @param values one or more values
* @return number of values added
*/
public long sSet(String key, Object... values) {
try {
return redisTemplate.opsForSet().add(key, values);
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
/**
* Add values to a set with an expiry
*
* @param key
* @param time time in seconds
* @param values one or more values
* @return number of values added
*/
public long sSetAndTime(String key, long time, Object... values) {
try {
Long count = redisTemplate.opsForSet().add(key, values);
if (time > 0)
expire(key, time);
return count;
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
/**
* Get the size of a set
*
* @param key
*/
public long sGetSetSize(String key) {
try {
return redisTemplate.opsForSet().size(key);
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
/**
* Remove the given values from a set
*
* @param key
* @param values one or more values
* @return number of values removed
*/
public long setRemove(String key, Object... values) {
try {
Long count = redisTemplate.opsForSet().remove(key, values);
return count;
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
// ===============================list=================================
/**
* Get a range of a list
*
* @param key
* @param start start index
* @param end end index; 0 to -1 means all values
*/
public List<Object> lGet(String key, long start, long end) {
try {
return redisTemplate.opsForList().range(key, start, end);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Get the length of a list
*
* @param key
*/
public long lGetListSize(String key) {
try {
return redisTemplate.opsForList().size(key);
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
/**
* Get a list element by index
*
* @param key
* @param index index >= 0: 0 is the head, 1 the second element, and so on; index < 0: -1 is the tail, -2 the second-to-last, and so on
*/
public Object lGetIndex(String key, long index) {
try {
return redisTemplate.opsForList().index(key, index);
} catch (Exception e) {
e.printStackTrace();
return null;
}
}
/**
* Append a value to a list
*
* @param key
* @param value
*/
public boolean lSet(String key, Object value) {
try {
redisTemplate.opsForList().rightPush(key, value);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Append a value to a list with an expiry
*
* @param key
* @param value
* @param time time in seconds
*/
public boolean lSet(String key, Object value, long time) {
try {
redisTemplate.opsForList().rightPush(key, value);
if (time > 0)
expire(key, time);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Append multiple values to a list
*
* @param key
* @param value
* @return
*/
public boolean lSet(String key, List<Object> value) {
try {
redisTemplate.opsForList().rightPushAll(key, value);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Append multiple values to a list with an expiry
*
* @param key
* @param value
* @param time time in seconds
* @return
*/
public boolean lSet(String key, List<Object> value, long time) {
try {
redisTemplate.opsForList().rightPushAll(key, value);
if (time > 0)
expire(key, time);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Replace the list element at the given index
*
* @param key
* @param index index
* @param value
* @return true on success, false on error
*/
public boolean lUpdateIndex(String key, long index, Object value) {
try {
redisTemplate.opsForList().set(key, index, value);
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
/**
* Remove occurrences of a value from the cached list
*
* @param key
* @param count number of occurrences to remove
* @param value
* @return number of elements removed
*/
public long lRemove(String key, long count, Object value) {
try {
Long removed = redisTemplate.opsForList().remove(key, count, value);
return removed == null ? 0 : removed;
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
}
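For context, here is a minimal usage sketch of the set and list helpers above. It assumes the utility class is named RedisUtils and is registered as a Spring bean; the service class and key names below are illustrative, not part of this commit.

import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

// Hypothetical caller; only the RedisUtils method calls mirror the class above.
@Service
public class LineageCacheDemo {

    @Autowired
    private RedisUtils redisUtils; // assumed name of the utility class above

    public void demo() {
        // Set: add two members with a 1-hour TTL, then test membership.
        redisUtils.sSetAndTime("lineage:tables", 3600, "t_metadata_extract_info", "meta_blood_analysis");
        boolean present = redisUtils.sHasKey("lineage:tables", "meta_blood_analysis"); // true

        // List: append, read the whole range (0 to -1), then remove one occurrence.
        redisUtils.lSet("lineage:queue", "job-1");
        List<Object> jobs = redisUtils.lGet("lineage:queue", 0, -1);
        redisUtils.lRemove("lineage:queue", 1, "job-1");
    }
}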

74
src/main/resources/application.yml

@@ -0,0 +1,74 @@
server:
  port: 8082
  servlet:
    context-path: /bloodAnalysis
spring:
  main:
    allow-circular-references: true
  application:
    name: blood-analysis
  data:
    redis:
      repositories:
        enabled: false
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    initialSize: 10
    minIdle: 10
    maxActive: 200
    # Max wait time for acquiring a connection, in milliseconds
    maxWait: 60000
    # Interval between checks for idle connections that should be closed, in milliseconds
    timeBetweenEvictionRunsMillis: 60000
    # Minimum time a connection stays idle in the pool before it may be evicted, in milliseconds
    minEvictableIdleTimeMillis: 30000
    validationQuery: select 'x'
    testWhileIdle: true
    testOnBorrow: false
    testOnReturn: false
    # Enable PSCache and set its size per connection
    poolPreparedStatements: true
    maxPoolPreparedStatementPerConnectionSize: 20
    # Filters for monitoring/statistics interception
    filters: stat,wall,slf4j
    # Enable the mergeSql feature and slow-SQL logging via connection properties
    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
  redis:
    host: 192.168.0.3
    # host: 127.0.0.1
    port: 6379
master:
  datasource:
    # url: jdbc:mysql://192.168.0.3:3306/TrunkSystem?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
    # username: admin
    # password: 123456
    url: jdbc:mysql://47.113.147.166:3306/vfa_test_0115?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true
    username: dbf
    password: 1q2w3e4r
    driverClassName: com.mysql.cj.jdbc.Driver
mybatis:
  mapper-locations: classpath:mapper/*.xml
  type-aliases-package: com.xgqc.dispatch.entity
  configuration:
    # log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
    log-impl: org.apache.ibatis.logging.log4j2.Log4j2Impl
  type-handlers-package: com.xgqc.dispatch.typeHandler
pagehelper:
  # helper-dialect: mysql
  params: count=countSql
  reasonable: true
  support-methods-arguments: true
  auto-runtime-dialect: true
  auto-dialect: true
logging:
  config: classpath:log4j2-dev.xml
  level:
    com.xgqc.dispatch.mapper: DEBUG
blood-analysis:
  pageNum: 1
  pageSize: 1000
  databaseUrl: http://47.121.207.11:12345/dolphinscheduler/datasources/withpwdlist?pageNo=1&pageSize=100
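The blood-analysis block at the bottom is custom application configuration. A minimal sketch of how those three properties could be bound, assuming a @ConfigurationProperties holder (the class below is hypothetical; the repo may read them with @Value instead):

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

// Hypothetical holder for the blood-analysis.* keys above.
@Component
@ConfigurationProperties(prefix = "blood-analysis")
public class BloodAnalysisProperties {

    private int pageNum;        // blood-analysis.pageNum
    private int pageSize;       // blood-analysis.pageSize
    private String databaseUrl; // blood-analysis.databaseUrl

    public int getPageNum() { return pageNum; }
    public void setPageNum(int pageNum) { this.pageNum = pageNum; }
    public int getPageSize() { return pageSize; }
    public void setPageSize(int pageSize) { this.pageSize = pageSize; }
    public String getDatabaseUrl() { return databaseUrl; }
    public void setDatabaseUrl(String databaseUrl) { this.databaseUrl = databaseUrl; }
}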

25
src/main/resources/banner.txt

@@ -0,0 +1,25 @@
developed by yfxue
////////////////////////////////////////////////////////////////////
// _ooOoo_ //
// o8888888o //
// 88" . "88 //
// (| ^_^ |) //
// O\ = /O //
// ____/`---'\____ //
// .' \\| |// `. //
// / \\||| : |||// \ //
// / _||||| -:- |||||- \ //
// | | \\\ - /// | | //
// | \_| ''\---/'' | | //
// \ .-\__ `-` ___/-. / //
// ___`. .' /--.--\ `. . ___ //
// ."" '< `.___\_<|>_/___.' >'"". //
// | | : `- \`.;`\ _ /`;.`/ - ` : | | //
// \ \ `-. \_ __\ /__ _/ .-` / / //
// ========`-.____`-.___\_____/___.-`____.-'======== //
// `=---=' //
// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ //
// Buddha bless: never down, never buggy //
////////////////////////////////////////////////////////////////////
${AnsiColor.BRIGHT_BLUE}
::: Project (version:${application.version}) ::: \(^O^)/ Spring-Boot ${spring-boot.version}

101
src/main/resources/log4j2-dev.xml

@@ -0,0 +1,101 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- The status attribute on Configuration controls log4j2's own internal logging; it can be omitted. Set it to trace to see detailed log4j2 internals. -->
<!-- monitorInterval: log4j2 automatically detects changes to this file and reconfigures itself; the value is the check interval in seconds -->
<configuration monitorInterval="5">
<!-- Log levels ordered by priority: OFF > FATAL > ERROR > WARN > INFO > DEBUG > TRACE > ALL -->
<!--变量配置-->
<Properties>
<!-- Output format: %date is the timestamp, %thread the thread name, %-5level the level padded to 5 characters, %msg the message, %n a newline -->
<!-- %logger{36}: logger name limited to at most 36 characters -->
<property name="LOG_PATTERN" value="%date{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" />
<!-- Log storage path; do not use a relative path here -->
<property name="FILE_PATH" value="./logs/" />
<property name="FILE_NAME" value="backend" />
</Properties>
<appenders>
<console name="Console" target="SYSTEM_OUT">
<!-- Output format for console logging -->
<PatternLayout pattern="${LOG_PATTERN}"/>
<!-- The console accepts events at this level and above (onMatch) and denies everything else (onMismatch) -->
<ThresholdFilter level="DEBUG" onMatch="ACCEPT" onMismatch="DENY"/>
</console>
<!-- This file receives everything and is cleared on each run (append="false"); handy for ad-hoc testing -->
<File name="Filelog" fileName="${FILE_PATH}/test.log" append="false">
<PatternLayout pattern="${LOG_PATTERN}"/>
</File>
<!-- Captures info level and above; once a file exceeds the configured size it is rolled into a dated, compressed archive -->
<RollingFile name="RollingFileInfo" fileName="${FILE_PATH}/info.log" filePattern="${FILE_PATH}/${FILE_NAME}-INFO-%d{yyyy-MM-dd}_%i.log.gz">
<!-- Accept events at info level and above (onMatch); deny the rest (onMismatch) -->
<ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
<PatternLayout pattern="${LOG_PATTERN}"/>
<Policies>
<!-- interval: how often to roll over, in units of the filePattern's date granularity (days here) -->
<TimeBasedTriggeringPolicy interval="1"/>
<SizeBasedTriggeringPolicy size="10MB"/>
</Policies>
<!-- If DefaultRolloverStrategy is not set, at most 7 archives are kept per rollover period before overwriting -->
<DefaultRolloverStrategy max="15"/>
</RollingFile>
<!-- Captures warn level and above; once a file exceeds the configured size it is rolled into a dated, compressed archive -->
<RollingFile name="RollingFileWarn" fileName="${FILE_PATH}/warn.log" filePattern="${FILE_PATH}/${FILE_NAME}-WARN-%d{yyyy-MM-dd}_%i.log.gz">
<!-- Accept events at warn level and above (onMatch); deny the rest (onMismatch) -->
<ThresholdFilter level="warn" onMatch="ACCEPT" onMismatch="DENY"/>
<PatternLayout pattern="${LOG_PATTERN}"/>
<Policies>
<!-- interval: how often to roll over, in units of the filePattern's date granularity (days here) -->
<TimeBasedTriggeringPolicy interval="1"/>
<SizeBasedTriggeringPolicy size="10MB"/>
</Policies>
<!-- If DefaultRolloverStrategy is not set, at most 7 archives are kept per rollover period before overwriting -->
<DefaultRolloverStrategy max="15"/>
</RollingFile>
<!-- Captures error level and above; once a file exceeds the configured size it is rolled into a dated, compressed archive -->
<RollingFile name="RollingFileError" fileName="${FILE_PATH}/error.log" filePattern="${FILE_PATH}/${FILE_NAME}-ERROR-%d{yyyy-MM-dd}_%i.log.gz">
<!-- Accept events at error level and above (onMatch); deny the rest (onMismatch) -->
<ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
<PatternLayout pattern="${LOG_PATTERN}"/>
<Policies>
<!-- interval: how often to roll over, in units of the filePattern's date granularity (days here) -->
<TimeBasedTriggeringPolicy interval="1"/>
<SizeBasedTriggeringPolicy size="10MB"/>
</Policies>
<!-- If DefaultRolloverStrategy is not set, at most 7 archives are kept per rollover period before overwriting -->
<DefaultRolloverStrategy max="15"/>
</RollingFile>
</appenders>
<!-- Logger nodes assign log levels and appenders to specific packages or classes -->
<!-- Appenders only take effect once they are referenced by a logger defined here -->
<loggers>
<!-- Filter out noisy DEBUG output from Spring and MyBatis -->
<logger name="org.mybatis" level="info" additivity="false">
<AppenderRef ref="Console"/>
</logger>
<!-- Framework logging -->
<!-- With additivity="false", a child logger writes only to its own appenders, never to its parent's -->
<Logger name="org.springframework" level="info" additivity="false">
<AppenderRef ref="Console"/>
</Logger>
<!-- Log generated SQL statements -->
<logger name="org.apache.ibatis" level="DEBUG"/>
<logger name="java.sql.Connection" level="DEBUG"/>
<logger name="java.sql.Statement" level="DEBUG"/>
<logger name="java.sql.PreparedStatement" level="DEBUG"/>
<root level="info">
<appender-ref ref="Console"/>
<appender-ref ref="Filelog"/>
<appender-ref ref="RollingFileInfo"/>
<appender-ref ref="RollingFileWarn"/>
<appender-ref ref="RollingFileError"/>
</root>
</loggers>
</configuration>
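With this configuration in place, application code only needs a standard SLF4J logger; the file above decides which appenders receive each event. A minimal sketch (the class is hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical class; any logger inheriting from <root level="info"> behaves this way.
public class LoggingDemo {

    private static final Logger log = LoggerFactory.getLogger(LoggingDemo.class);

    public static void main(String[] args) {
        log.debug("dropped: the root logger level is info");
        log.info("written to the console, test.log, and info.log");
        log.warn("additionally captured by warn.log");
        log.error("additionally captured by error.log");
    }
}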

8
src/main/resources/mapper/DataLineageInfoMapper.xml

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.guozhi.bloodanalysis.mapper.DataLineageInfoMapper">
<select id="search" resultType="com.guozhi.bloodanalysis.entity.DataLineageInfo">
select onum, ssys_cd as ssysCd,mdl_name as mdlName,proc_name as procName,proc_line as procLine,proc_text as procText
from t_metadata_data_lineage_info
</select>
</mapper>
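MyBatis binds each statement id to a method of the interface named in the namespace; a sketch of what that interface plausibly looks like, with the signature inferred from the statement's resultType:

package com.guozhi.bloodanalysis.mapper;

import java.util.List;

import org.apache.ibatis.annotations.Mapper;

import com.guozhi.bloodanalysis.entity.DataLineageInfo;

// Sketch: search() is bound to the <select id="search"> statement above.
@Mapper
public interface DataLineageInfoMapper {
    List<DataLineageInfo> search();
}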

55
src/main/resources/mapper/MetaBloodAnalysisMapper.xml

@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.guozhi.bloodanalysis.mapper.MetaBloodAnalysisMapper">
<insert id="insert" parameterType="com.guozhi.bloodanalysis.entity.MetaBloodAnalysis">
insert into meta_blood_analysis (id,proId,proName,
targetSysCd,targetMdlName,targetTableName,targetTableCnName,targetColName,targetColCnName,targetColType,
sourceSysCd,sourceMdlName,sourceTableName,sourceTableCnName,sourceColName,sourceColCnName,sourceColType)
values(
#{blood.id,jdbcType=VARCHAR},
#{blood.proId,jdbcType=INTEGER},
#{blood.proName,jdbcType=VARCHAR},
#{blood.targetSysCd,jdbcType=VARCHAR},
#{blood.targetMdlName,jdbcType=VARCHAR},
#{blood.targetTableName,jdbcType=VARCHAR},
#{blood.targetTableCnName,jdbcType=VARCHAR},
#{blood.targetColName,jdbcType=VARCHAR},
#{blood.targetColCnName,jdbcType=VARCHAR},
#{blood.targetColType,jdbcType=VARCHAR},
#{blood.sourceSysCd,jdbcType=VARCHAR},
#{blood.sourceMdlName,jdbcType=VARCHAR},
#{blood.sourceTableName,jdbcType=VARCHAR},
#{blood.sourceTableCnName,jdbcType=VARCHAR},
#{blood.sourceColName,jdbcType=VARCHAR},
#{blood.sourceColCnName,jdbcType=VARCHAR},
#{blood.sourceColType,jdbcType=VARCHAR}
)
</insert>
<select id="isColExis" resultType="com.guozhi.bloodanalysis.entity.MetaColumn">
select a.onum as onum,a.ssys_cd as ssysCd,a.mdl_name as mdlName,a.tab_eng_name as tabEngName, b.tab_cn_name as tabCnName,
a.fld_eng_name as fldEngName,a.fld_cn_name as fldCnName from t_metadata_fld_tab_extract_info a
left join t_metadata_extract_info b on a.ssys_cd = b.ssys_cd and a.mdl_name = b.mdl_name and a.tab_eng_name = b.tab_eng_name
where a.ssys_cd = #{db} and a.mdl_name = #{schema} and a.tab_eng_name = #{tableCode} and a.fld_eng_name = #{columnName}
</select>
<select id="getColumnsByTabId" resultType="java.util.Map">
select fld_eng_name as colCode, fld_cn_name as colName,fld_no as fldNo from t_metadata_fld_tab_extract_info a
left join t_metadata_extract_info b on a.ssys_cd = b.ssys_cd and a.mdl_name = b.mdl_name and a.tab_eng_name = b.tab_eng_name
where b.onum = #{tableId}
<if test="colCode != null and colCode !=''">
and a.fld_eng_name = #{colCode}
</if>
order by fld_no
</select>
<select id="getColumnsByTable" resultType="java.util.Map">
select fld_eng_name as colCode, fld_cn_name as colName,fld_no as fldNo from t_metadata_fld_tab_extract_info a
where ssys_cd =#{ssysCd} and mdl_name = #{mdlName} and tab_eng_name = #{tableName}
order by fld_no
</select>
<select id="getSystem" resultType="java.util.Map">
select ssys_cd as SYS_CODE, ssys_cd as SYS_NAME, mdl_name as SCH_CODE, mdl_name as SCH_NAME, tab_eng_name as TAB_CODE, tab_cn_name as TAB_NAME, onum as TAB_ID from t_metadata_extract_info where mdl_name = #{schema} and tab_eng_name = #{tableCode}
</select>
<delete id="deleteAllBloodData">
delete from meta_blood_analysis
</delete>
</mapper>
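Because the insert statement addresses its parameters as #{blood.*}, the matching interface method must expose the entity under the name blood, typically via @Param. A sketch of the assumed signatures (inferred from the statements above; the real interface may differ):

package com.guozhi.bloodanalysis.mapper;

import java.util.List;
import java.util.Map;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import com.guozhi.bloodanalysis.entity.MetaBloodAnalysis;
import com.guozhi.bloodanalysis.entity.MetaColumn;

// Sketch only: method names match the statement ids in the XML above.
@Mapper
public interface MetaBloodAnalysisMapper {

    int insert(@Param("blood") MetaBloodAnalysis blood); // #{blood.*} requires the @Param name

    MetaColumn isColExis(@Param("db") String db, @Param("schema") String schema,
                         @Param("tableCode") String tableCode, @Param("columnName") String columnName);

    List<Map<String, Object>> getColumnsByTabId(@Param("tableId") Long tableId,
                                                @Param("colCode") String colCode);

    List<Map<String, Object>> getColumnsByTable(@Param("ssysCd") String ssysCd,
                                                @Param("mdlName") String mdlName,
                                                @Param("tableName") String tableName);

    List<Map<String, Object>> getSystem(@Param("schema") String schema,
                                        @Param("tableCode") String tableCode);

    int deleteAllBloodData();
}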

13
src/test/java/com/guozhi/bloodanalysis/BloodAnalysisApplicationTests.java

@@ -0,0 +1,13 @@
package com.guozhi.bloodanalysis;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class BloodAnalysisApplicationTests {
@Test
void contextLoads() {
}
}