diff --git a/infra/database/type/hive/pom.xml b/infra/database/type/hive/pom.xml
new file mode 100644
index 0000000000000..5b98f9140566e
--- /dev/null
+++ b/infra/database/type/hive/pom.xml
@@ -0,0 +1,43 @@
+
+
+
+
+ 4.0.0
+
+ org.apache.shardingsphere
+ shardingsphere-infra-database-type
+ 5.5.1-SNAPSHOT
+
+ shardingsphere-infra-database-hive
+ ${project.artifactId}
+
+
+
+ org.apache.shardingsphere
+ shardingsphere-infra-database-core
+ ${project.version}
+
+
+
+ org.apache.shardingsphere
+ shardingsphere-test-util
+ ${project.version}
+ test
+
+
+
diff --git a/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/type/HiveDatabaseType.java b/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/type/HiveDatabaseType.java
new file mode 100644
index 0000000000000..2ea71356298a7
--- /dev/null
+++ b/infra/database/type/hive/src/main/java/org/apache/shardingsphere/infra/database/hive/type/HiveDatabaseType.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.infra.database.hive.type;
+
+import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
+
+import java.util.Collection;
+import java.util.Collections;
+
+/**
+ * Database type of hive.
+ */
+public final class HiveDatabaseType implements DatabaseType {
+
+ @Override
+ public Collection<String> getJdbcUrlPrefixes() {
+ return Collections.singleton("jdbc:hive2:");
+ }
+
+ @Override
+ public String getType() {
+ return "Hive";
+ }
+}
diff --git a/infra/database/type/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.database.core.type.DatabaseType b/infra/database/type/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.database.core.type.DatabaseType
new file mode 100644
index 0000000000000..9379ea14d5296
--- /dev/null
+++ b/infra/database/type/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.database.core.type.DatabaseType
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.shardingsphere.infra.database.hive.type.HiveDatabaseType
diff --git a/infra/database/type/hive/src/test/java/org/apache/shardingsphere/infra/database/hive/type/HiveDatabaseTypeTest.java b/infra/database/type/hive/src/test/java/org/apache/shardingsphere/infra/database/hive/type/HiveDatabaseTypeTest.java
new file mode 100644
index 0000000000000..066346f759264
--- /dev/null
+++ b/infra/database/type/hive/src/test/java/org/apache/shardingsphere/infra/database/hive/type/HiveDatabaseTypeTest.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.infra.database.hive.type;
+
+import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
+import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
+import org.junit.jupiter.api.Test;
+
+import java.util.Collections;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+class HiveDatabaseTypeTest {
+
+ @Test
+ void assertGetJdbcUrlPrefixes() {
+ assertThat(TypedSPILoader.getService(DatabaseType.class, "Hive").getJdbcUrlPrefixes(), is(Collections.singleton("jdbc:hive2:")));
+ }
+}
diff --git a/infra/database/type/pom.xml b/infra/database/type/pom.xml
index 9dbe85516b381..a73a489a84d02 100644
--- a/infra/database/type/pom.xml
+++ b/infra/database/type/pom.xml
@@ -36,6 +36,7 @@
sqlserver
clickhouse
doris
+ hive
h2
sql92
testcontainers
diff --git a/parser/sql/dialect/hive/pom.xml b/parser/sql/dialect/hive/pom.xml
new file mode 100644
index 0000000000000..710ccfff17025
--- /dev/null
+++ b/parser/sql/dialect/hive/pom.xml
@@ -0,0 +1,40 @@
+
+
+
+
+ 4.0.0
+
+ org.apache.shardingsphere
+ shardingsphere-parser-sql-dialect
+ 5.5.1-SNAPSHOT
+
+ shardingsphere-parser-sql-hive
+ ${project.artifactId}
+
+
+ doris
+
+
+
+
+ org.apache.shardingsphere
+ shardingsphere-infra-database-hive
+ ${project.version}
+
+
+
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Alphabet.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Alphabet.g4
new file mode 100644
index 0000000000000..f7603cc7e6384
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Alphabet.g4
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+lexer grammar Alphabet;
+
+FOR_GENERATOR: 'DO NOT MATCH ANY THING, JUST FOR GENERATOR';
+
+fragment A: [Aa];
+fragment B: [Bb];
+fragment C: [Cc];
+fragment D: [Dd];
+fragment E: [Ee];
+fragment F: [Ff];
+fragment G: [Gg];
+fragment H: [Hh];
+fragment I: [Ii];
+fragment J: [Jj];
+fragment K: [Kk];
+fragment L: [Ll];
+fragment M: [Mm];
+fragment N: [Nn];
+fragment O: [Oo];
+fragment P: [Pp];
+fragment Q: [Qq];
+fragment R: [Rr];
+fragment S: [Ss];
+fragment T: [Tt];
+fragment U: [Uu];
+fragment V: [Vv];
+fragment W: [Ww];
+fragment X: [Xx];
+fragment Y: [Yy];
+fragment Z: [Zz];
+fragment UL_: '_';
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/BaseRule.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/BaseRule.g4
new file mode 100644
index 0000000000000..a966f19387807
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/BaseRule.g4
@@ -0,0 +1,1338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+grammar BaseRule;
+
+import Symbol, Keyword, HiveKeyword, Literals;
+
+parameterMarker
+ : QUESTION_
+ ;
+
+customKeyword
+ : MAX
+ | MIN
+ | SUM
+ | COUNT
+ | GROUP_CONCAT
+ | CAST
+ | POSITION
+ | SUBSTRING
+ | SUBSTR
+ | EXTRACT
+ | TRIM
+ | LAST_DAY
+ | TRADITIONAL
+ | TREE
+ | MYSQL_ADMIN
+ | INSTANT
+ | INPLACE
+ | COPY
+ | UL_BINARY
+ | AUTOCOMMIT
+ | INNODB
+ | REDO_LOG
+ | LAST_VALUE
+ | PRIMARY
+ | MAXVALUE
+ | BIT_XOR
+ | MYSQL_MAIN
+ | UTC_DATE
+ | UTC_TIME
+ | UTC_TIMESTAMP
+ // removed duplicate UTC_TIMESTAMP alternative (already listed above)
+ ;
+
+literals
+ : stringLiterals
+ | numberLiterals
+ | temporalLiterals
+ | hexadecimalLiterals
+ | bitValueLiterals
+ | booleanLiterals
+ | nullValueLiterals
+ ;
+
+string_
+ : DOUBLE_QUOTED_TEXT | SINGLE_QUOTED_TEXT
+ ;
+
+stringLiterals
+ : (UNDERSCORE_CHARSET | UL_BINARY )? string_ | NCHAR_TEXT
+ ;
+
+numberLiterals
+ : (PLUS_ | MINUS_)? NUMBER_
+ ;
+
+temporalLiterals
+ : (DATE | TIME | TIMESTAMP) SINGLE_QUOTED_TEXT
+ ;
+
+hexadecimalLiterals
+ : UNDERSCORE_CHARSET? HEX_DIGIT_ collateClause?
+ ;
+
+bitValueLiterals
+ : UNDERSCORE_CHARSET? BIT_NUM_ collateClause?
+ ;
+
+booleanLiterals
+ : TRUE | FALSE
+ ;
+
+nullValueLiterals
+ : NULL
+ ;
+
+collationName
+ : textOrIdentifier | BINARY
+ ;
+
+identifier
+ : IDENTIFIER_
+ | identifierKeywordsUnambiguous
+ | identifierKeywordsAmbiguous1RolesAndLabels
+ | identifierKeywordsAmbiguous2Labels
+ | identifierKeywordsAmbiguous3Roles
+ | identifierKeywordsAmbiguous4SystemVariables
+ | customKeyword
+ | DOUBLE_QUOTED_TEXT
+ | UNDERSCORE_CHARSET
+ | BQUOTA_STRING
+ ;
+
+identifierKeywordsUnambiguous
+ : ACTION
+ | ACCOUNT
+ | ACTIVE
+// | ADDDATE
+ | ADMIN
+ | AFTER
+ | AGAINST
+ | AGGREGATE
+ | ALGORITHM
+ | ALWAYS
+ | ANY
+ | ARRAY
+ | AT
+ | ATTRIBUTE
+ | AUTOEXTEND_SIZE
+ | AUTO_INCREMENT
+ | AVG_ROW_LENGTH
+ | AVG
+ | BACKUP
+ | BINLOG
+ | BIT
+ | BLOCK
+ | BOOLEAN
+ | BOOL
+ | BTREE
+ | BUCKETS
+ | CASCADED
+ | CATALOG_NAME
+ | CHAIN
+ | CHANGED
+ | CHANNEL
+ | CIPHER
+ | CLASS_ORIGIN
+ | CLIENT
+ | CLOSE
+ | COALESCE
+ | CODE
+ | COLLATION
+ | COLUMNS
+ | COLUMN_FORMAT
+ | COLUMN_NAME
+ | COMMITTED
+ | COMPACT
+ | COMPLETION
+ | COMPONENT
+ | COMPRESSED
+ | COMPRESSION
+ | CONCURRENT
+ | CONNECTION
+ | CONSISTENT
+ | CONSTRAINT_CATALOG
+ | CONSTRAINT_NAME
+ | CONSTRAINT_SCHEMA
+ | CONTEXT
+ | CPU
+ | CREATE
+ | CURRENT
+ | CURSOR_NAME
+ | DATAFILE
+ | DATA
+ | DATETIME
+ | DATE
+ | DAY
+ | DAY_MINUTE
+ | DEFAULT_AUTH
+ | DEFINER
+ | DEFINITION
+ | DELAY_KEY_WRITE
+ | DESCRIPTION
+ | DIAGNOSTICS
+ | DIRECTORY
+ | DISABLE
+ | DISCARD
+ | DISK
+ | DUMPFILE
+ | DUPLICATE
+ | DYNAMIC
+ | ENABLE
+ | ENCRYPTION
+ | ENDS
+ | ENFORCED
+ | ENGINES
+ | ENGINE
+ | ENGINE_ATTRIBUTE
+ | ENUM
+ | ERRORS
+ | ERROR
+ | ESCAPE
+ | EVENTS
+ | EVERY
+ | EXCHANGE
+ | EXCLUDE
+ | EXPANSION
+ | EXPIRE
+ | EXPORT
+ | EXTENDED
+ | EXTENT_SIZE
+ | FAILED_LOGIN_ATTEMPTS
+ | FAST
+ | FAULTS
+ | FILE_BLOCK_SIZE
+ | FILTER
+ | FIRST
+ | FIXED
+ | FOLLOWING
+ | FORMAT
+ | FOUND
+ | FULL
+ | GENERAL
+ | GEOMETRYCOLLECTION
+ | GEOMETRY
+ | GET_FORMAT
+ | GET_MASTER_PUBLIC_KEY
+ | GRANTS
+ | GROUP_REPLICATION
+ | HASH
+ | HISTOGRAM
+ | HISTORY
+ | HOSTS
+ | HOST
+ | HOUR
+ | IDENTIFIED
+ | IGNORE_SERVER_IDS
+ | INACTIVE
+ | INDEXES
+ | INITIAL_SIZE
+ | INSERT_METHOD
+ | INSTANCE
+ | INVISIBLE
+ | INVOKER
+ | IO
+ | IPC
+ | ISOLATION
+ | ISSUER
+ | JSON
+ | JSON_VALUE
+ | KEY
+ | KEY_BLOCK_SIZE
+ | LAST
+ | LEAVES
+ | LESS
+ | LEVEL
+ | LINESTRING
+ | LIST
+ | LOCKED
+ | LOCKS
+ | LOGFILE
+ | LOGS
+ | MASTER_AUTO_POSITION
+ | MASTER_COMPRESSION_ALGORITHM
+ | MASTER_CONNECT_RETRY
+ | MASTER_DELAY
+ | MASTER_HEARTBEAT_PERIOD
+ | MASTER_HOST
+ | NETWORK_NAMESPACE
+ | MASTER_LOG_FILE
+ | MASTER_LOG_POS
+ | MASTER_PASSWORD
+ | MASTER_PORT
+ | MASTER_PUBLIC_KEY_PATH
+ | MASTER_RETRY_COUNT
+ | MASTER_SERVER_ID
+ | MASTER_SSL_CAPATH
+ | MASTER_SSL_CA
+ | MASTER_SSL_CERT
+ | MASTER_SSL_CIPHER
+ | MASTER_SSL_CRLPATH
+ | MASTER_SSL_CRL
+ | MASTER_SSL_KEY
+ | MASTER_SSL
+ | MASTER
+ | MASTER_TLS_CIPHERSUITES
+ | MASTER_TLS_VERSION
+ | MASTER_USER
+ | MASTER_ZSTD_COMPRESSION_LEVEL
+ | MAX_CONNECTIONS_PER_HOUR
+ | MAX_QUERIES_PER_HOUR
+ | MAX_ROWS
+ | MAX_SIZE
+ | MAX_UPDATES_PER_HOUR
+ | MAX_USER_CONNECTIONS
+ | MEDIUM
+ | MEMBER
+ | MEMORY
+ | MERGE
+ | MESSAGE_TEXT
+ | MICROSECOND
+ | MIGRATE
+ | MINUTE
+ | MIN_ROWS
+ | MODE
+ | MODIFY
+ | MONTH
+ | MULTILINESTRING
+ | MULTIPOINT
+ | MULTIPOLYGON
+ | MUTEX
+ | MYSQL_ERRNO
+ | NAMES
+ | NAME
+ | NATIONAL
+ | NCHAR
+ | NDBCLUSTER
+ | NESTED
+ | NEVER
+ | NEW
+ | NEXT
+ | NODEGROUP
+ | NOWAIT
+ | NO_WAIT
+ | NULLS
+ | NUMBER
+ | NVARCHAR
+ | OFF
+ | OFFSET
+ | OJ
+ | OLD
+ | ONE
+ | ONLY
+ | OPEN
+ | OPTIONAL
+ | OPTIONS
+ | ORDINALITY
+ | ORGANIZATION
+ | OTHERS
+ | OWNER
+ | PACK_KEYS
+ | PAGE
+ | PARSER
+ | PARTIAL
+ | PARTITIONING
+ | PARTITIONS
+ | PASSWORD
+ | PASSWORD_LOCK_TIME
+ | PATH
+ | PHASE
+ | PLUGINS
+ | PLUGIN_DIR
+ | PLUGIN
+ | POINT
+ | POLYGON
+ | PORT
+ | PRECEDING
+ | PRESERVE
+ | PREV
+ | PRIVILEGES
+ | PRIVILEGE_CHECKS_USER
+ | PROCESSLIST
+ | PROFILES
+ | PROFILE
+ | QUARTER
+ | QUERY
+ | QUICK
+ | RANDOM
+ | READ_ONLY
+ | REBUILD
+ | RECOVER
+ | REDO_BUFFER_SIZE
+ | REDUNDANT
+ | REFERENCE
+ | RELAY
+ | RELAYLOG
+ | RELAY_LOG_FILE
+ | RELAY_LOG_POS
+ | RELAY_THREAD
+ | REMOVE
+ | REORGANIZE
+ | REPEATABLE
+ | REPLICATE_DO_DB
+ | REPLICATE_DO_TABLE
+ | REPLICATE_IGNORE_DB
+ | REPLICATE_IGNORE_TABLE
+ | REPLICATE_REWRITE_DB
+ | REPLICATE_WILD_DO_TABLE
+ | REPLICATE_WILD_IGNORE_TABLE
+ | REQUIRE_ROW_FORMAT
+// | REQUIRE_TABLE_PRIMARY_KEY_CHECK
+ | USER_RESOURCES
+ | RESPECT
+ | RESTORE
+ | RESUME
+ | RETAIN
+ | RETURNED_SQLSTATE
+ | RETURNING
+ | RETURNS
+ | REUSE
+ | REVERSE
+ | ROLE
+ | ROLLUP
+ | ROTATE
+ | ROUTINE
+ | ROW_COUNT
+ | ROW_FORMAT
+ | RTREE
+ | SCHEDULE
+ | SCHEMA_NAME
+ | SECONDARY_ENGINE
+ | SECONDARY_ENGINE_ATTRIBUTE
+ | SECONDARY_LOAD
+ | SECONDARY
+ | SECONDARY_UNLOAD
+ | SECOND
+ | SECURITY
+ | SERIALIZABLE
+ | SERIAL
+ | SERVER
+ | SHARE
+ | SIMPLE
+// | SKIP
+ | SLOW
+ | SNAPSHOT
+ | SOCKET
+ | SONAME
+ | SOUNDS
+ | SOURCE
+ | SQL_AFTER_GTIDS
+ | SQL_AFTER_MTS_GAPS
+ | SQL_BEFORE_GTIDS
+ | SQL_BUFFER_RESULT
+ | SQL_NO_CACHE
+ | SQL_THREAD
+ | SRID
+ | STACKED
+ | STARTS
+ | STATS_AUTO_RECALC
+ | STATS_PERSISTENT
+ | STATS_SAMPLE_PAGES
+ | STATUS
+ | STORAGE
+ | STREAM
+ | STRING
+ | SUBCLASS_ORIGIN
+// | SUBDATE
+ | SUBJECT
+ | SUBPARTITIONS
+ | SUBPARTITION
+ | SUSPEND
+ | SWAPS
+ | SWITCHES
+ | SYSTEM
+ | TABLE
+ | TABLES
+ | TABLESPACE
+ | TABLE_CHECKSUM
+ | TABLE_NAME
+ | TEMPORARY
+ | TEMPTABLE
+ | TEXT
+ | THAN
+ | THREAD_PRIORITY
+ | TIES
+ | TIMESTAMP_ADD
+ | TIMESTAMP_DIFF
+ | TIMESTAMP
+ | TIME
+ | TLS
+ | TRANSACTION
+ | TRIGGERS
+ | TYPES
+ | TYPE
+ | UNBOUNDED
+ | UNCOMMITTED
+ | UNDEFINED
+ | UNDOFILE
+ | UNDO_BUFFER_SIZE
+ | UNKNOWN
+ | UNTIL
+ | UPGRADE
+ | USER
+ | USE_FRM
+ | VALIDATION
+ | VALUE
+ | VARIABLES
+ | VCPU
+ | VIEW
+ | VISIBLE
+ | WAIT
+ | WARNINGS
+ | WEEK
+ | WEIGHT_STRING
+ | WITHOUT
+ | WORK
+ | WRAPPER
+ | X509
+ | XID
+ | XML
+ | YEAR
+ | YEAR_MONTH
+ ;
+
+identifierKeywordsAmbiguous1RolesAndLabels
+ : EXECUTE
+ | RESTART
+ | SHUTDOWN
+ ;
+
+identifierKeywordsAmbiguous2Labels
+ : ASCII
+ | BEGIN
+ | BYTE
+ | CACHE
+ | CHARSET
+ | CHECKSUM
+ | CLONE
+ | COMMENT
+ | COMMIT
+ | CONTAINS
+ | DEALLOCATE
+ | DO
+ | END
+ | FLUSH
+ | FOLLOWS
+ | HANDLER
+ | HELP
+ | IMPORT
+ | INSTALL
+ | LANGUAGE
+ | NO
+ | PRECEDES
+ | PREPARE
+ | REPAIR
+ | RESET
+ | ROLLBACK
+ | SAVEPOINT
+ | SIGNED
+ | SLAVE
+ | START
+ | STOP
+ | TRUNCATE
+ | UNICODE
+ | UNINSTALL
+ | XA
+ ;
+
+identifierKeywordsAmbiguous3Roles
+ : EVENT
+ | FILE
+ | NONE
+ | PROCESS
+ | PROXY
+ | RELOAD
+ | REPLICATION
+ | RESOURCE
+ | SUPER
+ ;
+
+identifierKeywordsAmbiguous4SystemVariables
+ : GLOBAL
+ | LOCAL
+ | PERSIST
+ | PERSIST_ONLY
+ | SESSION
+ ;
+
+textOrIdentifier
+ : identifier | string_ | ipAddress
+ ;
+
+ipAddress
+ : IP_ADDRESS
+ ;
+
+variable
+ : userVariable | systemVariable
+ ;
+
+userVariable
+ : AT_ textOrIdentifier
+ | textOrIdentifier
+ ;
+
+systemVariable
+ : AT_ AT_ (systemVariableScope=(GLOBAL | SESSION | LOCAL) DOT_)? rvalueSystemVariable
+ ;
+
+rvalueSystemVariable
+ : textOrIdentifier
+ | textOrIdentifier DOT_ identifier
+ ;
+
+setSystemVariable
+ : AT_ AT_ (optionType DOT_)? internalVariableName
+ ;
+
+optionType
+ : GLOBAL | PERSIST | PERSIST_ONLY | SESSION | LOCAL
+ ;
+
+internalVariableName
+ : identifier
+ | DEFAULT DOT_ identifier
+ | identifier DOT_ identifier
+ ;
+
+setExprOrDefault
+ : expr | DEFAULT | ALL | ON | BINARY | ROW | SYSTEM
+ ;
+
+transactionCharacteristics
+ : transactionAccessMode (COMMA_ isolationLevel)?
+ | isolationLevel (COMMA_ transactionAccessMode)?
+ ;
+
+isolationLevel
+ : ISOLATION LEVEL isolationTypes
+ ;
+
+isolationTypes
+ : REPEATABLE READ | READ COMMITTED | READ UNCOMMITTED | SERIALIZABLE
+ ;
+
+transactionAccessMode
+ : READ (WRITE | ONLY)
+ ;
+
+schemaName
+ : identifier
+ ;
+
+schemaNames
+ : schemaName (COMMA_ schemaName)*
+ ;
+
+charsetName
+ : textOrIdentifier | BINARY | DEFAULT
+ ;
+
+schemaPairs
+ : schemaPair (COMMA_ schemaPair)*
+ ;
+
+schemaPair
+ : LP_ schemaName COMMA_ schemaName RP_
+ ;
+
+tableName
+ : (owner DOT_)? name
+ ;
+
+columnName
+ : identifier
+ ;
+
+indexName
+ : identifier
+ ;
+
+constraintName
+ : identifier
+ ;
+
+oldColumn
+ : columnName
+ ;
+
+newColumn
+ : columnName
+ ;
+
+delimiterName
+ : textOrIdentifier | ('\\'. | ~('\'' | '"' | '`' | '\\'))+
+ ;
+
+userIdentifierOrText
+ : textOrIdentifier (AT_ textOrIdentifier)?
+ ;
+
+username
+ : userIdentifierOrText | CURRENT_USER (LP_ RP_)?
+ ;
+
+eventName
+ : (owner DOT_)? identifier
+ ;
+
+serverName
+ : textOrIdentifier
+ ;
+
+wrapperName
+ : textOrIdentifier
+ ;
+
+functionName
+ : (owner DOT_)? identifier
+ ;
+
+procedureName
+ : (owner DOT_)? identifier
+ ;
+
+viewName
+ : (owner DOT_)? identifier
+ ;
+
+owner
+ : identifier
+ ;
+
+alias
+ : textOrIdentifier
+ ;
+
+name
+ : identifier
+ ;
+
+tableList
+ : tableName (COMMA_ tableName)*
+ ;
+
+viewNames
+ : viewName (COMMA_ viewName)*
+ ;
+
+columnNames
+ : columnName (COMMA_ columnName)*
+ ;
+
+groupName
+ : identifier
+ ;
+
+routineName
+ : identifier
+ ;
+
+shardLibraryName
+ : stringLiterals
+ ;
+
+componentName
+ : string_
+ ;
+
+pluginName
+ : textOrIdentifier
+ ;
+
+hostname
+ : string_
+ ;
+
+port
+ : NUMBER_
+ ;
+
+cloneInstance
+ : username AT_ hostname COLON_ port
+ ;
+
+cloneDir
+ : string_
+ ;
+
+channelName
+ : identifier (DOT_ identifier)?
+ ;
+
+logName
+ : stringLiterals
+ ;
+
+roleName
+ : roleIdentifierOrText (AT_ textOrIdentifier)?
+ ;
+
+roleIdentifierOrText
+ : identifier | string_
+ ;
+
+engineRef
+ : textOrIdentifier
+ ;
+
+triggerName
+ : identifier (DOT_ identifier)?
+ ;
+
+triggerTime
+ : BEFORE | AFTER
+ ;
+
+tableOrTables
+ : TABLE | TABLES
+ ;
+
+userOrRole
+ : username | roleName
+ ;
+
+partitionName
+ : identifier
+ ;
+
+identifierList
+ : identifier (COMMA_ identifier)*
+ ;
+
+allOrPartitionNameList
+ : ALL | identifierList
+ ;
+
+triggerEvent
+ : INSERT | UPDATE | DELETE
+ ;
+
+triggerOrder
+ : (FOLLOWS | PRECEDES) triggerName
+ ;
+
+expr
+ : booleanPrimary
+ | expr andOperator expr
+ | expr orOperator expr
+ | expr XOR expr
+ | notOperator expr
+ ;
+
+andOperator
+ : AND | AND_
+ ;
+
+orOperator
+ : OR | OR_
+ ;
+
+notOperator
+ : NOT | NOT_
+ ;
+
+booleanPrimary
+ : booleanPrimary IS NOT? (TRUE | FALSE | UNKNOWN | NULL)
+ | booleanPrimary SAFE_EQ_ predicate
+ | booleanPrimary MEMBER OF LP_ (expr) RP_
+ | booleanPrimary comparisonOperator predicate
+ | booleanPrimary comparisonOperator (ALL | ANY) subquery
+ | booleanPrimary assignmentOperator predicate
+ | predicate
+ ;
+
+assignmentOperator
+ : EQ_ | ASSIGNMENT_
+ ;
+
+comparisonOperator
+ : EQ_ | GTE_ | GT_ | LTE_ | LT_ | NEQ_
+ ;
+
+predicate
+ : bitExpr NOT? IN subquery
+ | bitExpr NOT? IN LP_ expr (COMMA_ expr)* RP_
+ | bitExpr NOT? BETWEEN bitExpr AND predicate
+ | bitExpr SOUNDS LIKE bitExpr
+ | bitExpr NOT? LIKE simpleExpr (ESCAPE simpleExpr)?
+ | bitExpr NOT? (REGEXP | RLIKE) bitExpr
+ | bitExpr
+ ;
+
+bitExpr
+ : bitExpr VERTICAL_BAR_ bitExpr
+ | bitExpr AMPERSAND_ bitExpr
+ | bitExpr SIGNED_LEFT_SHIFT_ bitExpr
+ | bitExpr SIGNED_RIGHT_SHIFT_ bitExpr
+ | bitExpr PLUS_ bitExpr
+ | bitExpr MINUS_ bitExpr
+ | bitExpr ASTERISK_ bitExpr
+ | bitExpr SLASH_ bitExpr
+ | bitExpr DIV bitExpr
+ | bitExpr MOD bitExpr
+ | bitExpr MOD_ bitExpr
+ | bitExpr CARET_ bitExpr
+ | bitExpr PLUS_ intervalExpression
+ | bitExpr MINUS_ intervalExpression
+ | simpleExpr
+ ;
+
+simpleExpr
+ : functionCall
+ | parameterMarker
+ | literals
+ | columnRef
+ | simpleExpr collateClause
+ | variable
+ | simpleExpr OR_ simpleExpr
+ | (PLUS_ | MINUS_ | TILDE_ | notOperator | BINARY) simpleExpr
+ | ROW? LP_ expr (COMMA_ expr)* RP_
+ | EXISTS? subquery
+ | LBE_ identifier expr RBE_
+ | identifier (JSON_SEPARATOR | JSON_UNQUOTED_SEPARATOR) string_
+ | path (RETURNING dataType)? onEmptyError?
+ | matchExpression
+ | caseExpression
+ | intervalExpression
+ ;
+
+path
+ : string_
+ ;
+
+onEmptyError
+ : (NULL | ERROR | DEFAULT literals) ON (EMPTY | ERROR)
+ ;
+
+columnRef
+ : identifier (DOT_ identifier)? (DOT_ identifier)?
+ ;
+
+columnRefList
+ : columnRef (COMMA_ columnRef)*
+ ;
+
+functionCall
+ : aggregationFunction | specialFunction | regularFunction | jsonFunction | udfFunction
+ ;
+
+udfFunction
+ : functionName LP_ (expr? | expr (COMMA_ expr)*) RP_
+ ;
+
+aggregationFunction
+ : aggregationFunctionName LP_ distinct? (expr (COMMA_ expr)* | ASTERISK_)? collateClause? RP_ overClause?
+ ;
+
+jsonFunction
+ : columnRef (JSON_SEPARATOR | JSON_UNQUOTED_SEPARATOR) path
+ | jsonFunctionName LP_ (expr? | expr (COMMA_ expr)*) RP_
+ ;
+
+jsonFunctionName
+ : JSON_ARRAY | JSON_ARRAY_APPEND | JSON_ARRAY_INSERT | JSON_CONTAINS
+ | JSON_CONTAINS_PATH | JSON_DEPTH | JSON_EXTRACT | JSON_INSERT | JSON_KEYS | JSON_LENGTH | JSON_MERGE | JSON_MERGE_PATCH
+ | JSON_MERGE_PRESERVE | JSON_OBJECT | JSON_OVERLAPS | JSON_PRETTY | JSON_QUOTE | JSON_REMOVE | JSON_REPLACE
+ | JSON_SCHEMA_VALID | JSON_SCHEMA_VALIDATION_REPORT | JSON_SEARCH | JSON_SET | JSON_STORAGE_FREE | JSON_STORAGE_SIZE
+ | JSON_TABLE | JSON_TYPE | JSON_UNQUOTE | JSON_VALID | JSON_VALUE | MEMBER OF
+ ;
+
+aggregationFunctionName
+ : MAX | MIN | SUM | COUNT | AVG | BIT_XOR
+ ;
+
+distinct
+ : DISTINCT
+ ;
+
+overClause
+ : OVER (windowSpecification | identifier)
+ ;
+
+windowSpecification
+ : LP_ identifier? (PARTITION BY expr (COMMA_ expr)*)? orderByClause? frameClause? RP_
+ ;
+
+frameClause
+ : (ROWS | RANGE) (frameStart | frameBetween)
+ ;
+
+frameStart
+ : CURRENT ROW | UNBOUNDED PRECEDING | UNBOUNDED FOLLOWING | expr PRECEDING | expr FOLLOWING
+ ;
+
+frameEnd
+ : frameStart
+ ;
+
+frameBetween
+ : BETWEEN frameStart AND frameEnd
+ ;
+
+specialFunction
+ : groupConcatFunction | windowFunction | castFunction | convertFunction | positionFunction | substringFunction | extractFunction
+ | charFunction | trimFunction | weightStringFunction | valuesFunction | currentUserFunction
+ ;
+
+currentUserFunction
+ : CURRENT_USER (LP_ RP_)?
+ ;
+
+groupConcatFunction
+ : GROUP_CONCAT LP_ distinct? (expr (COMMA_ expr)* | ASTERISK_)? (orderByClause)? (SEPARATOR expr)? RP_
+ ;
+
+windowFunction
+ : funcName = (ROW_NUMBER | RANK | DENSE_RANK | CUME_DIST | PERCENT_RANK) LP_ RP_ windowingClause
+ | funcName = NTILE (simpleExpr) windowingClause
+ | funcName = (LEAD | LAG) LP_ expr leadLagInfo? RP_ nullTreatment? windowingClause
+ | funcName = (FIRST_VALUE | LAST_VALUE) LP_ expr RP_ nullTreatment? windowingClause
+ | funcName = NTH_VALUE LP_ expr COMMA_ simpleExpr RP_ (FROM (FIRST | LAST))? nullTreatment? windowingClause
+ ;
+
+windowingClause
+ : OVER (windowName=identifier | windowSpecification)
+ ;
+
+leadLagInfo
+ : COMMA_ (NUMBER_ | QUESTION_) (COMMA_ expr)?
+ ;
+
+nullTreatment
+ : (RESPECT | IGNORE) NULLS
+ ;
+
+checkType
+ : FOR UPGRADE | QUICK | FAST | MEDIUM | EXTENDED | CHANGED
+ ;
+
+repairType
+ : QUICK | EXTENDED | USE_FRM
+ ;
+
+castFunction
+ : CAST LP_ expr AS castType ARRAY? RP_
+ | CAST LP_ expr AT TIME ZONE expr AS DATETIME typeDatetimePrecision? RP_
+ ;
+
+convertFunction
+ : CONVERT LP_ expr COMMA_ castType RP_
+ | CONVERT LP_ expr USING charsetName RP_
+ ;
+
+castType
+ : castTypeName = BINARY fieldLength?
+ | castTypeName = CHAR fieldLength? charsetWithOptBinary?
+ | (castTypeName = NCHAR | castTypeName = NATIONAL_CHAR) fieldLength?
+ | castTypeName = (SIGNED | SIGNED_INT | SIGNED_INTEGER)
+ | castTypeName = (UNSIGNED | UNSIGNED_INT | UNSIGNED_INTEGER)
+ | castTypeName = DATE
+ | castTypeName = TIME typeDatetimePrecision?
+ | castTypeName = DATETIME typeDatetimePrecision?
+ | castTypeName = DECIMAL (fieldLength | precision)?
+ | castTypeName = JSON
+ | castTypeName = REAL
+ | castTypeName = DOUBLE PRECISION
+ | castTypeName = FLOAT precision?
+ ;
+
+positionFunction
+ : POSITION LP_ expr IN expr RP_
+ ;
+
+substringFunction
+ : (SUBSTRING | SUBSTR) LP_ expr FROM NUMBER_ (FOR NUMBER_)? RP_
+ | (SUBSTRING | SUBSTR) LP_ expr COMMA_ NUMBER_ (COMMA_ NUMBER_)? RP_
+ ;
+
+extractFunction
+ : EXTRACT LP_ identifier FROM expr RP_
+ ;
+
+charFunction
+ : CHAR LP_ expr (COMMA_ expr)* (USING charsetName)? RP_
+ ;
+
+trimFunction
+ : TRIM LP_ ((LEADING | BOTH | TRAILING) expr? FROM)? expr RP_
+ | TRIM LP_ (expr FROM)? expr RP_
+ ;
+
+valuesFunction
+ : VALUES LP_ columnRefList RP_
+ ;
+
+weightStringFunction
+ : WEIGHT_STRING LP_ expr (AS dataType)? levelClause? RP_
+ ;
+
+levelClause
+ : LEVEL (levelInWeightListElement (COMMA_ levelInWeightListElement)* | NUMBER_ MINUS_ NUMBER_)
+ ;
+
+levelInWeightListElement
+ : NUMBER_ direction? REVERSE?
+ ;
+
+regularFunction
+ : completeRegularFunction
+ | shorthandRegularFunction
+ ;
+
+shorthandRegularFunction
+ : CURRENT_DATE | CURRENT_TIME (LP_ NUMBER_? RP_)? | CURRENT_TIMESTAMP | LAST_DAY | LOCALTIME | LOCALTIMESTAMP
+ ;
+
+completeRegularFunction
+ : regularFunctionName (LP_ (expr (COMMA_ expr)* | ASTERISK_)? RP_)
+ ;
+
+regularFunctionName
+ : IF | LOCALTIME | LOCALTIMESTAMP | REPLACE | INSERT | INTERVAL | MOD
+ | DATABASE | SCHEMA | LEFT | RIGHT | DATE | DAY | GEOMETRYCOLLECTION | REPEAT
+ | LINESTRING | MULTILINESTRING | MULTIPOINT | MULTIPOLYGON | POINT | POLYGON
+ | TIME | TIMESTAMP | TIMESTAMP_ADD | TIMESTAMP_DIFF | CURRENT_TIMESTAMP
+ | CURRENT_DATE | CURRENT_TIME | UTC_TIMESTAMP | identifier
+ ;
+
+matchExpression
+ : MATCH (columnRefList | LP_ columnRefList RP_ ) AGAINST LP_ expr matchSearchModifier? RP_
+ ;
+
+matchSearchModifier
+ : IN NATURAL LANGUAGE MODE | IN NATURAL LANGUAGE MODE WITH QUERY EXPANSION | IN BOOLEAN MODE | WITH QUERY EXPANSION
+ ;
+
+caseExpression
+ : CASE simpleExpr? caseWhen+ caseElse? END
+ ;
+
+datetimeExpr
+ : expr
+ ;
+
+binaryLogFileIndexNumber
+ : NUMBER_
+ ;
+
+caseWhen
+ : WHEN expr THEN expr
+ ;
+
+caseElse
+ : ELSE expr
+ ;
+
+intervalExpression
+ : INTERVAL intervalValue
+ ;
+
+intervalValue
+ : expr intervalUnit
+ ;
+
+intervalUnit
+ : MICROSECOND | SECOND | MINUTE | HOUR | DAY | WEEK | MONTH
+ | QUARTER | YEAR | SECOND_MICROSECOND | MINUTE_MICROSECOND | MINUTE_SECOND | HOUR_MICROSECOND | HOUR_SECOND
+ | HOUR_MINUTE | DAY_MICROSECOND | DAY_SECOND | DAY_MINUTE | DAY_HOUR | YEAR_MONTH
+ ;
+
+subquery
+ : 'refer subquery in DMLStatement.g4'
+ ;
+
+orderByClause
+ : ORDER BY orderByItem (COMMA_ orderByItem)*
+ ;
+
+orderByItem
+ : (numberLiterals | expr) direction?
+ ;
+
+dataType
+ : dataTypeName = (INTEGER | INT | TINYINT | SMALLINT | MIDDLEINT | MEDIUMINT | BIGINT) fieldLength? fieldOptions?
+ | (dataTypeName = REAL | dataTypeName = DOUBLE PRECISION?) precision? fieldOptions?
+ | dataTypeName = (FLOAT | DECIMAL | NUMERIC | FIXED) (fieldLength | precision)? fieldOptions?
+ | dataTypeName = BIT fieldLength?
+ | dataTypeName = (BOOL | BOOLEAN)
+ | dataTypeName = CHAR fieldLength? charsetWithOptBinary?
+ | (dataTypeName = NCHAR | dataTypeName = NATIONAL_CHAR) fieldLength? BINARY?
+ | dataTypeName = (SIGNED | SIGNED_INT | SIGNED_INTEGER)
+ | dataTypeName = BINARY fieldLength?
+ | (dataTypeName = CHAR_VARYING | dataTypeName = CHARACTER_VARYING | dataTypeName = VARCHAR) fieldLength charsetWithOptBinary?
+ | (dataTypeName = NATIONAL VARCHAR | dataTypeName = NVARCHAR | dataTypeName = NCHAR VARCHAR | dataTypeName = NATIONAL_CHAR_VARYING | dataTypeName = NCHAR VARYING) fieldLength BINARY?
+ | dataTypeName = VARBINARY fieldLength?
+ | dataTypeName = YEAR fieldLength? fieldOptions?
+ | dataTypeName = DATE
+ | dataTypeName = TIME typeDatetimePrecision?
+ | dataTypeName = (UNSIGNED | UNSIGNED_INT | UNSIGNED_INTEGER)
+ | dataTypeName = TIMESTAMP typeDatetimePrecision?
+ | dataTypeName = DATETIME typeDatetimePrecision?
+ | dataTypeName = TINYBLOB
+ | dataTypeName = BLOB fieldLength?
+ | dataTypeName = (MEDIUMBLOB | LONGBLOB)
+ | dataTypeName = LONG VARBINARY
+ | dataTypeName = (LONG_CHAR_VARYING | LONG_VARCHAR)? charsetWithOptBinary?
+ | dataTypeName = TINYTEXT charsetWithOptBinary?
+ | dataTypeName = TEXT fieldLength? charsetWithOptBinary?
+ | dataTypeName = MEDIUMTEXT charsetWithOptBinary?
+ | dataTypeName = LONGTEXT charsetWithOptBinary?
+ | dataTypeName = ENUM stringList charsetWithOptBinary?
+ | dataTypeName = SET stringList charsetWithOptBinary?
+ | dataTypeName = (SERIAL | JSON | GEOMETRY | GEOMCOLLECTION | GEOMETRYCOLLECTION | POINT | MULTIPOINT | LINESTRING | MULTILINESTRING | POLYGON | MULTIPOLYGON)
+ ;
+
+stringList
+ : LP_ textString (COMMA_ textString)* RP_
+ ;
+
+textString
+ : string_
+ | HEX_DIGIT_
+ | BIT_NUM_
+ ;
+
+textStringHash
+ : string_ | HEX_DIGIT_
+ ;
+
+fieldOptions
+ : (UNSIGNED | SIGNED | ZEROFILL)+
+ ;
+
+precision
+ : LP_ NUMBER_ COMMA_ NUMBER_ RP_
+ ;
+
+typeDatetimePrecision
+ : LP_ NUMBER_ RP_
+ ;
+
+charsetWithOptBinary
+ : ascii
+ | unicode
+ | BYTE
+ | charset charsetName BINARY?
+ | BINARY (charset charsetName)?
+ ;
+
+ascii
+ : ASCII BINARY?
+ | BINARY ASCII
+ ;
+
+unicode
+ : UNICODE BINARY?
+ | BINARY UNICODE
+ ;
+
+charset
+ : (CHAR | CHARACTER) SET
+ | CHARSET
+ ;
+
+defaultCollation
+ : DEFAULT? COLLATE EQ_? collationName
+ ;
+
+defaultEncryption
+ : DEFAULT? ENCRYPTION EQ_? string_
+ ;
+
+defaultCharset
+ : DEFAULT? charset EQ_? charsetName
+ ;
+
+now
+ : (CURRENT_TIMESTAMP | LOCALTIME | LOCALTIMESTAMP) (LP_ NUMBER_? RP_)?
+ ;
+
+columnFormat
+ : FIXED
+ | DYNAMIC
+ | DEFAULT
+ ;
+
+storageMedia
+ : DISK
+ | MEMORY
+ | DEFAULT
+ ;
+
+direction
+ : ASC | DESC
+ ;
+
+keyOrIndex
+ : KEY | INDEX
+ ;
+
+fieldLength
+ : LP_ length=NUMBER_ RP_
+ ;
+
+characterSet
+ : charset charsetName
+ ;
+
+collateClause
+ : COLLATE (collationName | parameterMarker)
+ ;
+
+fieldOrVarSpec
+ : LP_ (identifier (COMMA_ identifier)*)? RP_
+ ;
+
+ifNotExists
+ : IF NOT EXISTS
+ ;
+
+ifExists
+ : IF EXISTS
+ ;
+
+connectionId
+ : NUMBER_
+ ;
+
+labelName
+ : identifier
+ ;
+
+cursorName
+ : identifier
+ ;
+
+conditionName
+ : identifier
+ ;
+
+combineOption
+ : ALL | DISTINCT
+ ;
+
+noWriteToBinLog
+ : LOCAL
+ | NO_WRITE_TO_BINLOG
+ ;
+
+channelOption
+ : FOR CHANNEL string_
+ ;
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Comments.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Comments.g4
new file mode 100644
index 0000000000000..006f57e0047d7
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Comments.g4
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+lexer grammar Comments;
+
+import Symbol;
+
+BLOCK_COMMENT: '/*' .*? '*/' -> channel(HIDDEN); // C-style block comment, non-greedy match, routed to the hidden channel
+INLINE_COMMENT: (('-- ' | '#') ~[\r\n]* ('\r'? '\n' | EOF) | '--' ('\r'? '\n' | EOF)) -> channel(HIDDEN); // '-- ' (space required) or '#' to end of line, or a bare '--' line; NOTE(review): '#' comments are MySQL-derived -- Hive documents only '--'; confirm intended for the Hive dialect
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/DMLStatement.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/DMLStatement.g4
new file mode 100644
index 0000000000000..8af67a1dbed69
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/DMLStatement.g4
@@ -0,0 +1,324 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+grammar DMLStatement;
+
+import BaseRule;
+
+insert // INSERT statement: VALUES, SET-assignments, or INSERT ... SELECT form
+    : INSERT insertSpecification INTO? tableName partitionNames? (insertValuesClause | setAssignmentsClause | insertSelectClause) onDuplicateKeyClause?
+    ;
+
+insertSpecification // optional priority modifier plus IGNORE
+    : (LOW_PRIORITY | DELAYED | HIGH_PRIORITY)? IGNORE?
+    ;
+
+insertValuesClause // optional column list, then VALUES/VALUE rows or ROW constructors
+    : (LP_ fields? RP_ )? (VALUES | VALUE) (assignmentValues (COMMA_ assignmentValues)* | rowConstructorList) valueReference?
+    ;
+
+fields // comma-separated insert target columns
+    : insertIdentifier (COMMA_ insertIdentifier)*
+    ;
+
+insertIdentifier // a plain column reference or a qualified wildcard
+    : columnRef | tableWild
+    ;
+
+tableWild // qualified wildcard such as t.* or db.t.*
+    : identifier DOT_ (identifier DOT_)? ASTERISK_
+    ;
+
+insertSelectClause // INSERT ... SELECT with optional row alias and column list
+    : valueReference? (LP_ fields? RP_)? select
+    ;
+
+onDuplicateKeyClause // ON DUPLICATE KEY UPDATE assignments; (AS identifier) aliases the new row
+    : (AS identifier)? ON DUPLICATE KEY UPDATE assignment (COMMA_ assignment)*
+    ;
+
+valueReference // row alias with optional derived column names
+    : AS alias derivedColumns?
+    ;
+
+derivedColumns // parenthesized list of column aliases
+    : LP_ alias (COMMA_ alias)* RP_
+    ;
+
+update // UPDATE statement; withClause allows CTE-prefixed updates
+    : withClause? UPDATE updateSpecification_ tableReferences setAssignmentsClause whereClause? orderByClause? limitClause?
+    ;
+
+updateSpecification_ // optional LOW_PRIORITY and IGNORE modifiers
+    : LOW_PRIORITY? IGNORE?
+    ;
+
+assignment // column = value
+    : columnRef EQ_ assignmentValue
+    ;
+
+setAssignmentsClause // SET col = val, ... with optional row alias
+    : valueReference? SET assignment (COMMA_ assignment)*
+    ;
+
+assignmentValues // parenthesized value row; LP_ RP_ is an empty row
+    : LP_ assignmentValue (COMMA_ assignmentValue)* RP_
+    | LP_ RP_
+    ;
+
+assignmentValue // expression, binary blob literal, or DEFAULT
+    : blobValue | expr | DEFAULT
+    ;
+
+blobValue // _binary 'string' literal
+    : UL_BINARY string_
+    ;
+
+delete // DELETE statement: single-table or multi-table form
+    : DELETE deleteSpecification (singleTableClause | multipleTablesClause) whereClause? orderByClause? limitClause?
+    ;
+
+deleteSpecification // optional LOW_PRIORITY / QUICK / IGNORE modifiers
+    : LOW_PRIORITY? QUICK? IGNORE?
+    ;
+
+singleTableClause // FROM table with optional alias and partition selection
+    : FROM tableName (AS? alias)? partitionNames?
+    ;
+
+multipleTablesClause // multi-table delete: t1, t2 FROM ... or FROM t1, t2 USING ...
+    : tableAliasRefList FROM tableReferences | FROM tableAliasRefList USING tableReferences
+    ;
+
+select // top-level SELECT entry: query expression, parenthesized query, or SELECT ... INTO
+    : queryExpression lockClauseList?
+    | queryExpressionParens
+    | selectWithInto
+    ;
+
+selectWithInto // SELECT with an INTO destination; may itself be parenthesized
+    : LP_ selectWithInto RP_
+    | queryExpression selectIntoExpression lockClauseList?
+    | queryExpression lockClauseList selectIntoExpression
+    ;
+
+queryExpression // optionally CTE-prefixed query body with trailing ORDER BY / LIMIT
+    : withClause? (queryExpressionBody | queryExpressionParens) orderByClause? limitClause?
+    ;
+
+queryExpressionBody // left-recursive chain of UNION/EXCEPT combinations
+    : queryPrimary
+    | queryExpressionParens combineClause
+    | queryExpressionBody combineClause
+    ;
+
+combineClause // UNION or EXCEPT with optional ALL/DISTINCT
+    : UNION combineOption? (queryPrimary | queryExpressionParens)
+    | EXCEPT combineOption? (queryPrimary | queryExpressionParens)
+    ;
+
+queryExpressionParens // parenthesized query expression, possibly nested
+    : LP_ (queryExpressionParens | queryExpression lockClauseList?) RP_
+    ;
+
+queryPrimary // a SELECT body, VALUES constructor, or TABLE statement
+    : querySpecification
+    | tableValueConstructor
+    | tableStatement
+    ;
+
+querySpecification // SELECT core: modifiers, select list, and optional clauses
+    : SELECT selectSpecification* projections selectIntoExpression? fromClause? whereClause? groupByClause? havingClause? windowClause?
+    ;
+
+tableStatement // TABLE t shorthand query
+    : TABLE tableName
+    ;
+
+tableValueConstructor // VALUES ROW(...), ROW(...)
+    : VALUES rowConstructorList
+    ;
+
+rowConstructorList // comma-separated ROW(...) constructors
+    : ROW assignmentValues (COMMA_ ROW assignmentValues)*
+    ;
+
+withClause // WITH [RECURSIVE] common table expressions
+    : WITH RECURSIVE? cteClause (COMMA_ cteClause)*
+    ;
+
+cteClause // CTE name, optional column list, AS (subquery)
+    : identifier (LP_ columnNames RP_)? AS subquery
+    ;
+
+selectSpecification // SELECT modifiers: duplicate handling and result/optimizer hints
+    : duplicateSpecification | HIGH_PRIORITY | STRAIGHT_JOIN | SQL_SMALL_RESULT | SQL_BIG_RESULT | SQL_BUFFER_RESULT | SQL_NO_CACHE | SQL_CALC_FOUND_ROWS
+    ;
+
+duplicateSpecification // ALL / DISTINCT / DISTINCTROW
+    : ALL | DISTINCT | DISTINCTROW
+    ;
+
+projections // select list: leading bare * or first projection, then the rest
+    : (unqualifiedShorthand | projection) (COMMA_ projection)*
+    ;
+
+projection // expression with optional alias, or a qualified wildcard
+    : expr (AS? alias)? | qualifiedShorthand
+    ;
+
+unqualifiedShorthand // bare *
+    : ASTERISK_
+    ;
+
+qualifiedShorthand // t.* or db.t.*
+    : (identifier DOT_)? identifier DOT_ASTERISK_
+    ;
+
+fromClause // FROM DUAL or table references
+    : FROM (DUAL | tableReferences)
+    ;
+
+tableReferences // comma-separated table references
+    : tableReference (COMMA_ tableReference)*
+    ;
+
+escapedTableReference // table factor plus joins, used inside the { OJ ... } escape
+    : tableFactor joinedTable*
+    ;
+
+tableReference // table factor or ODBC { OJ ... } escape, with trailing joins
+    : (tableFactor | LBE_ OJ escapedTableReference RBE_) joinedTable*
+    ;
+
+tableFactor // base table, derived table (subquery), or parenthesized references
+    : tableName partitionNames? (AS? alias)? indexHintList? | subquery AS? alias (LP_ columnNames RP_)? | LP_ tableReferences RP_
+    ;
+
+partitionNames // PARTITION (p1, p2, ...) selection
+    : PARTITION LP_ identifier (COMMA_ identifier)* RP_
+    ;
+
+indexHintList // comma-separated index hints
+    : indexHint (COMMA_ indexHint)*
+    ;
+
+indexHint // USE/IGNORE/FORCE INDEX hint with optional FOR scope
+    : (USE | IGNORE | FORCE) (INDEX | KEY) (FOR (JOIN | ORDER BY | GROUP BY))? LP_ indexName (COMMA_ indexName)* RP_
+    ;
+
+joinedTable // inner joins may omit the join condition; outer joins require one
+    : innerJoinType tableReference joinSpecification?
+    | outerJoinType tableReference joinSpecification
+    | naturalJoinType tableFactor
+    ;
+
+innerJoinType // [INNER | CROSS] JOIN or STRAIGHT_JOIN
+    : (INNER | CROSS)? JOIN
+    | STRAIGHT_JOIN
+    ;
+
+outerJoinType // LEFT/RIGHT [OUTER] JOIN
+    : (LEFT | RIGHT) OUTER? JOIN
+    ;
+
+naturalJoinType // NATURAL [INNER] or NATURAL LEFT/RIGHT [OUTER] JOIN
+    : NATURAL INNER? JOIN
+    | NATURAL (LEFT | RIGHT) OUTER? JOIN
+    ;
+
+joinSpecification // ON condition or USING (column list)
+    : ON expr | USING LP_ columnNames RP_
+    ;
+
+whereClause // WHERE predicate
+    : WHERE expr
+    ;
+
+groupByClause // GROUP BY items with optional WITH ROLLUP
+    : GROUP BY orderByItem (COMMA_ orderByItem)* (WITH ROLLUP)?
+    ;
+
+havingClause // HAVING predicate
+    : HAVING expr
+    ;
+
+limitClause // LIMIT [offset,] count or LIMIT count OFFSET offset
+    : LIMIT ((limitOffset COMMA_)? limitRowCount | limitRowCount OFFSET limitOffset)
+    ;
+
+limitRowCount // numeric literal or bound parameter
+    : numberLiterals | parameterMarker
+    ;
+
+limitOffset // numeric literal or bound parameter
+    : numberLiterals | parameterMarker
+    ;
+
+windowClause // WINDOW name AS (spec), ...
+    : WINDOW windowItem (COMMA_ windowItem)*
+    ;
+
+windowItem // single named window definition
+    : identifier AS windowSpecification
+    ;
+
+subquery // always a parenthesized query expression
+    : queryExpressionParens
+    ;
+
+selectLinesInto // LINES formatting options for SELECT ... INTO OUTFILE
+    : STARTING BY string_ | TERMINATED BY string_
+    ;
+
+selectFieldsInto // COLUMNS formatting options for SELECT ... INTO OUTFILE
+    : TERMINATED BY string_ | OPTIONALLY? ENCLOSED BY string_ | ESCAPED BY string_
+    ;
+
+selectIntoExpression // INTO variables, INTO DUMPFILE, or INTO OUTFILE with format options
+    : INTO variable (COMMA_ variable )* | INTO DUMPFILE string_
+    | (INTO OUTFILE string_ (CHARACTER SET charsetName)?(COLUMNS selectFieldsInto+)? (LINES selectLinesInto+)?)
+    ;
+
+lockClause // FOR UPDATE/SHARE [OF tables] [NOWAIT | SKIP LOCKED], or legacy LOCK IN SHARE MODE
+    : FOR lockStrength tableLockingList? lockedRowAction?
+    | LOCK IN SHARE MODE
+    ;
+
+lockClauseList // one or more locking clauses
+    : lockClause+
+    ;
+
+lockStrength // UPDATE or SHARE
+    : UPDATE | SHARE
+    ;
+
+lockedRowAction // SKIP LOCKED or NOWAIT
+    : SKIP_SYMBOL LOCKED | NOWAIT
+    ;
+
+tableLockingList // OF table list attached to a lock clause
+    : OF tableAliasRefList
+    ;
+
+tableIdentOptWild // table name with optional .* suffix
+    : tableName DOT_ASTERISK_?
+    ;
+
+tableAliasRefList // comma-separated table[.*] references
+    : tableIdentOptWild (COMMA_ tableIdentOptWild)*
+    ;
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/HiveKeyword.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/HiveKeyword.g4
new file mode 100644
index 0000000000000..2d1bcdcd64b01
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/HiveKeyword.g4
@@ -0,0 +1,3144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+lexer grammar HiveKeyword;
+
+import Alphabet;
+
+ACCESSIBLE
+ : A C C E S S I B L E
+ ;
+
+ACCOUNT
+ : A C C O U N T
+ ;
+
+ACTION
+ : A C T I O N
+ ;
+
+ACTIVE
+ : A C T I V E
+ ;
+
+ADD
+ : A D D
+ ;
+
+ADMIN
+ : A D M I N
+ ;
+
+AFTER
+ : A F T E R
+ ;
+
+AGAINST
+ : A G A I N S T
+ ;
+
+AGGREGATE
+ : A G G R E G A T E
+ ;
+
+ALGORITHM
+ : A L G O R I T H M
+ ;
+
+ALL
+ : A L L
+ ;
+
+ALTER
+ : A L T E R
+ ;
+
+ALWAYS
+ : A L W A Y S
+ ;
+
+ANALYZE
+ : A N A L Y Z E
+ ;
+
+AND
+ : A N D
+ ;
+
+ANY
+ : A N Y
+ ;
+
+ARRAY
+ : A R R A Y
+ ;
+
+AS
+ : A S
+ ;
+
+ASC
+ : A S C
+ ;
+
+ASCII
+ : A S C I I
+ ;
+
+ASENSITIVE
+ : A S E N S I T I V E
+ ;
+
+AT
+ : A T
+ ;
+
+ATTRIBUTE
+ : A T T R I B U T E
+ ;
+
+AUTOEXTEND_SIZE
+ : A U T O E X T E N D UL_ S I Z E
+ ;
+
+AUTO_INCREMENT
+ : A U T O UL_ I N C R E M E N T
+ ;
+
+AVG
+ : A V G
+ ;
+
+ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS
+ : A S S I G N UL_ G T I D S UL_ T O UL_ A N O N Y M O U S UL_ T R A N S A C T I O N S
+ ;
+
+BIT_XOR
+ : B I T UL_ X O R
+ ;
+
+AVG_ROW_LENGTH
+ : A V G UL_ R O W UL_ L E N G T H
+ ;
+
+BACKUP
+ : B A C K U P
+ ;
+
+BEFORE
+ : B E F O R E
+ ;
+
+BEGIN
+ : B E G I N
+ ;
+
+BETWEEN
+ : B E T W E E N
+ ;
+
+BIGINT
+ : B I G I N T
+ ;
+
+BINARY
+ : B I N A R Y
+ ;
+
+BINLOG
+ : B I N L O G
+ ;
+
+BIT
+ : B I T
+ ;
+
+BLOB
+ : B L O B
+ ;
+
+BLOCK
+ : B L O C K
+ ;
+
+BOOL
+ : B O O L
+ ;
+
+BOOLEAN
+ : B O O L E A N
+ ;
+
+BOTH
+ : B O T H
+ ;
+
+BTREE
+ : B T R E E
+ ;
+
+BUCKETS
+ : B U C K E T S
+ ;
+
+BY
+ : B Y
+ ;
+
+BYTE
+ : B Y T E
+ ;
+
+CACHE
+ : C A C H E
+ ;
+
+CALL
+ : C A L L
+ ;
+
+CASCADE
+ : C A S C A D E
+ ;
+
+CASCADED
+ : C A S C A D E D
+ ;
+
+CASE
+ : C A S E
+ ;
+
+CATALOG_NAME
+ : C A T A L O G UL_ N A M E
+ ;
+
+CHAIN
+ : C H A I N
+ ;
+
+CHANGE
+ : C H A N G E
+ ;
+
+CHANGED
+ : C H A N G E D
+ ;
+
+CHANNEL
+ : C H A N N E L
+ ;
+
+CHAR
+ : C H A R
+ ;
+
+CHAR_VARYING // fused 'CHAR VARYING' token; NOTE(review): matches only a single literal space between the words (tabs or multiple spaces will not match) -- confirm intended
+    : CHAR ' ' VARYING
+    ;
+
+CHARACTER
+    : C H A R A C T E R
+    ;
+
+CHARACTER_VARYING // fused 'CHARACTER VARYING' token; same single-space limitation as CHAR_VARYING
+    : CHARACTER ' ' VARYING
+    ;
+
+CHARSET
+ : C H A R S E T
+ ;
+
+CHECK
+ : C H E C K
+ ;
+
+CHECKSUM
+ : C H E C K S U M
+ ;
+
+CIPHER
+ : C I P H E R
+ ;
+
+CLASS_ORIGIN
+ : C L A S S UL_ O R I G I N
+ ;
+
+CLIENT
+ : C L I E N T
+ ;
+
+CLONE
+ : C L O N E
+ ;
+
+CLOSE
+ : C L O S E
+ ;
+
+COALESCE
+ : C O A L E S C E
+ ;
+
+CODE
+ : C O D E
+ ;
+
+COLLATE
+ : C O L L A T E
+ ;
+
+COLLATION
+ : C O L L A T I O N
+ ;
+
+COLUMN
+ : C O L U M N
+ ;
+
+COLUMNS
+ : C O L U M N S
+ ;
+
+COLUMN_FORMAT
+ : C O L U M N UL_ F O R M A T
+ ;
+
+COLUMN_NAME
+ : C O L U M N UL_ N A M E
+ ;
+
+COMMENT
+ : C O M M E N T
+ ;
+
+COMMIT
+ : C O M M I T
+ ;
+
+COMMITTED
+ : C O M M I T T E D
+ ;
+
+COMPACT
+ : C O M P A C T
+ ;
+
+COMPLETION
+ : C O M P L E T I O N
+ ;
+
+COMPONENT
+ : C O M P O N E N T
+ ;
+
+COMPRESSED
+ : C O M P R E S S E D
+ ;
+
+COMPRESSION
+ : C O M P R E S S I O N
+ ;
+
+CONCURRENT
+ : C O N C U R R E N T
+ ;
+
+CONDITION
+ : C O N D I T I O N
+ ;
+
+CONNECTION
+ : C O N N E C T I O N
+ ;
+
+CONSISTENT
+ : C O N S I S T E N T
+ ;
+
+CONSTRAINT
+ : C O N S T R A I N T
+ ;
+
+CONSTRAINT_CATALOG
+ : C O N S T R A I N T UL_ C A T A L O G
+ ;
+
+CONSTRAINT_NAME
+ : C O N S T R A I N T UL_ N A M E
+ ;
+
+CONSTRAINT_SCHEMA
+ : C O N S T R A I N T UL_ S C H E M A
+ ;
+
+CONTAINS
+ : C O N T A I N S
+ ;
+
+CONTEXT
+ : C O N T E X T
+ ;
+
+CONTINUE
+ : C O N T I N U E
+ ;
+
+CONVERT
+ : C O N V E R T
+ ;
+
+CPU
+ : C P U
+ ;
+
+CREATE
+ : C R E A T E
+ ;
+
+CROSS
+ : C R O S S
+ ;
+
+CUBE
+ : C U B E
+ ;
+
+CUME_DIST
+ : C U M E UL_ D I S T
+ ;
+
+CURRENT
+ : C U R R E N T
+ ;
+
+CURRENT_DATE
+ : C U R R E N T UL_ D A T E
+ ;
+
+CURRENT_TIME
+ : C U R R E N T UL_ T I M E
+ ;
+
+CURRENT_TIMESTAMP
+ : C U R R E N T UL_ T I M E S T A M P
+ ;
+
+CURRENT_USER
+ : C U R R E N T UL_ U S E R
+ ;
+
+CURSOR
+ : C U R S O R
+ ;
+
+CURSOR_NAME
+ : C U R S O R UL_ N A M E
+ ;
+
+DATA
+ : D A T A
+ ;
+
+DATABASE
+ : D A T A B A S E
+ ;
+
+DATABASES
+ : D A T A B A S E S
+ ;
+
+DATAFILE
+ : D A T A F I L E
+ ;
+
+DATE
+ : D A T E
+ ;
+
+DATETIME
+ : D A T E T I M E
+ ;
+
+DAY
+ : D A Y
+ ;
+
+DAY_HOUR
+ : D A Y UL_ H O U R
+ ;
+
+DAY_MICROSECOND
+ : D A Y UL_ M I C R O S E C O N D
+ ;
+
+DAY_MINUTE
+ : D A Y UL_ M I N U T E
+ ;
+
+DAY_SECOND
+ : D A Y UL_ S E C O N D
+ ;
+
+DEALLOCATE
+ : D E A L L O C A T E
+ ;
+
+DEC
+ : D E C
+ ;
+
+DECIMAL
+ : D E C I M A L
+ ;
+
+DECLARE
+ : D E C L A R E
+ ;
+
+DEFAULT
+ : D E F A U L T
+ ;
+
+DEFAULT_AUTH
+ : D E F A U L T UL_ A U T H
+ ;
+
+DEFINER
+ : D E F I N E R
+ ;
+
+DEFINITION
+ : D E F I N I T I O N
+ ;
+
+DELAYED
+ : D E L A Y E D
+ ;
+
+DELAY_KEY_WRITE
+ : D E L A Y UL_ K E Y UL_ W R I T E
+ ;
+
+DELETE
+ : D E L E T E
+ ;
+
+DENSE_RANK
+ : D E N S E UL_ R A N K
+ ;
+
+DESC
+ : D E S C
+ ;
+
+DESCRIBE
+ : D E S C R I B E
+ ;
+
+DESCRIPTION
+ : D E S C R I P T I O N
+ ;
+
+DETERMINISTIC
+ : D E T E R M I N I S T I C
+ ;
+
+DIAGNOSTICS
+ : D I A G N O S T I C S
+ ;
+
+DIRECTORY
+ : D I R E C T O R Y
+ ;
+
+DISABLE
+ : D I S A B L E
+ ;
+
+DISCARD
+ : D I S C A R D
+ ;
+
+DISK
+ : D I S K
+ ;
+
+DISTINCT
+ : D I S T I N C T
+ ;
+
+DISTINCTROW
+ : D I S T I N C T R O W
+ ;
+
+DIV
+ : D I V
+ ;
+
+DO
+ : D O
+ ;
+
+DOUBLE
+ : D O U B L E
+ ;
+
+DROP
+ : D R O P
+ ;
+
+DUAL
+ : D U A L
+ ;
+
+DUMPFILE
+ : D U M P F I L E
+ ;
+
+DUPLICATE
+ : D U P L I C A T E
+ ;
+
+DYNAMIC
+ : D Y N A M I C
+ ;
+
+EACH
+ : E A C H
+ ;
+
+ELSE
+ : E L S E
+ ;
+
+ELSEIF
+ : E L S E I F
+ ;
+
+EMPTY
+ : E M P T Y
+ ;
+
+ENABLE
+ : E N A B L E
+ ;
+
+ENCLOSED
+ : E N C L O S E D
+ ;
+
+ENCRYPTION
+ : E N C R Y P T I O N
+ ;
+
+END
+ : E N D
+ ;
+
+ENDS
+ : E N D S
+ ;
+
+ENFORCED
+ : E N F O R C E D
+ ;
+
+ENGINE
+ : E N G I N E
+ ;
+
+ENGINES
+ : E N G I N E S
+ ;
+
+ENGINE_ATTRIBUTE
+ : E N G I N E UL_ A T T R I B U T E
+ ;
+
+ENUM
+ : E N U M
+ ;
+
+ERROR
+ : E R R O R
+ ;
+
+ERRORS
+ : E R R O R S
+ ;
+
+ESCAPE
+ : E S C A P E
+ ;
+
+ESCAPED
+ : E S C A P E D
+ ;
+
+EVENT
+ : E V E N T
+ ;
+
+EVENTS
+ : E V E N T S
+ ;
+
+EVERY
+ : E V E R Y
+ ;
+
+EXCEPT
+ : E X C E P T
+ ;
+
+EXCHANGE
+ : E X C H A N G E
+ ;
+
+EXCLUDE
+ : E X C L U D E
+ ;
+
+EXECUTE
+ : E X E C U T E
+ ;
+
+EXISTS
+ : E X I S T S
+ ;
+
+EXIT
+ : E X I T
+ ;
+
+EXPANSION
+ : E X P A N S I O N
+ ;
+
+EXPIRE
+ : E X P I R E
+ ;
+
+EXPLAIN
+ : E X P L A I N
+ ;
+
+EXPORT
+ : E X P O R T
+ ;
+
+EXTENDED
+ : E X T E N D E D
+ ;
+
+EXTENT_SIZE
+ : E X T E N T UL_ S I Z E
+ ;
+
+FAILED_LOGIN_ATTEMPTS
+ : F A I L E D UL_ L O G I N UL_ A T T E M P T S
+ ;
+
+FALSE
+ : F A L S E
+ ;
+
+FAST
+ : F A S T
+ ;
+
+FAULTS
+ : F A U L T S
+ ;
+
+FETCH
+ : F E T C H
+ ;
+
+FIELDS // synonym: re-emitted as a COLUMNS token via ANTLR type() so parser rules only reference COLUMNS
+    : F I E L D S -> type(COLUMNS)
+    ;
+
+FILE
+ : F I L E
+ ;
+
+FILE_BLOCK_SIZE
+ : F I L E UL_ B L O C K UL_ S I Z E
+ ;
+
+FILTER
+ : F I L T E R
+ ;
+
+FIRST
+ : F I R S T
+ ;
+
+FIRST_VALUE
+ : F I R S T UL_ V A L U E
+ ;
+
+FIXED
+ : F I X E D
+ ;
+
+FLOAT
+ : F L O A T
+ ;
+
+FLOAT4
+ : F L O A T '4'
+ ;
+
+FLOAT8
+ : F L O A T '8'
+ ;
+
+FLUSH
+ : F L U S H
+ ;
+
+FOLLOWING
+ : F O L L O W I N G
+ ;
+
+FOLLOWS
+ : F O L L O W S
+ ;
+
+FOR
+ : F O R
+ ;
+
+FORCE
+ : F O R C E
+ ;
+
+FOREIGN
+ : F O R E I G N
+ ;
+
+FORMAT
+ : F O R M A T
+ ;
+
+FOUND
+ : F O U N D
+ ;
+
+FROM
+ : F R O M
+ ;
+
+FULL
+ : F U L L
+ ;
+
+FULLTEXT
+ : F U L L T E X T
+ ;
+
+FUNCTION
+ : F U N C T I O N
+ ;
+
+GENERAL
+ : G E N E R A L
+ ;
+
+GENERATED
+ : G E N E R A T E D
+ ;
+
+GEOMETRY
+ : G E O M E T R Y
+ ;
+
+GEOMCOLLECTION
+ : G E O M C O L L E C T I O N
+ ;
+
+GEOMETRYCOLLECTION
+ : G E O M E T R Y C O L L E C T I O N
+ ;
+
+GET
+ : G E T
+ ;
+
+GET_FORMAT
+ : G E T UL_ F O R M A T
+ ;
+
+GET_MASTER_PUBLIC_KEY
+ : G E T UL_ M A S T E R UL_ P U B L I C UL_ K E Y
+ ;
+
+GLOBAL
+ : G L O B A L
+ ;
+
+GRANT
+ : G R A N T
+ ;
+
+GRANTS
+ : G R A N T S
+ ;
+
+GROUP
+ : G R O U P
+ ;
+
+GROUPING
+ : G R O U P I N G
+ ;
+
+GROUPS
+ : G R O U P S
+ ;
+
+GROUP_REPLICATION
+ : G R O U P UL_ R E P L I C A T I O N
+ ;
+
+GET_SOURCE_PUBLIC_KEY
+ : G E T UL_ S O U R C E UL_ P U B L I C UL_ K E Y
+ ;
+
+GTID_ONLY
+ : G T I D UL_ O N L Y
+ ;
+
+GENERATE
+ : G E N E R A T E
+ ;
+
+HANDLER
+ : H A N D L E R
+ ;
+
+HASH
+ : H A S H
+ ;
+
+HAVING
+ : H A V I N G
+ ;
+
+HELP
+ : H E L P
+ ;
+
+HIGH_PRIORITY
+ : H I G H UL_ P R I O R I T Y
+ ;
+
+HISTOGRAM
+ : H I S T O G R A M
+ ;
+
+HISTORY
+ : H I S T O R Y
+ ;
+
+HOST
+ : H O S T
+ ;
+
+HOSTS
+ : H O S T S
+ ;
+
+HOUR
+ : H O U R
+ ;
+
+HOUR_MICROSECOND
+ : H O U R UL_ M I C R O S E C O N D
+ ;
+
+HOUR_MINUTE
+ : H O U R UL_ M I N U T E
+ ;
+
+HOUR_SECOND
+ : H O U R UL_ S E C O N D
+ ;
+
+IDENTIFIED
+ : I D E N T I F I E D
+ ;
+
+IF
+ : I F
+ ;
+
+IGNORE
+ : I G N O R E
+ ;
+
+IGNORE_SERVER_IDS
+ : I G N O R E UL_ S E R V E R UL_ I D S
+ ;
+
+IMPORT
+ : I M P O R T
+ ;
+
+IN
+ : I N
+ ;
+
+INACTIVE
+ : I N A C T I V E
+ ;
+
+INDEX
+ : I N D E X
+ ;
+
+INDEXES
+ : I N D E X E S
+ ;
+
+INFILE
+ : I N F I L E
+ ;
+
+INITIAL_SIZE
+ : I N I T I A L UL_ S I Z E
+ ;
+
+INNER
+ : I N N E R
+ ;
+
+INOUT
+ : I N O U T
+ ;
+
+INSENSITIVE
+ : I N S E N S I T I V E
+ ;
+
+INSERT
+ : I N S E R T
+ ;
+
+INSERT_METHOD
+ : I N S E R T UL_ M E T H O D
+ ;
+
+INSTALL
+ : I N S T A L L
+ ;
+
+INSTANCE
+ : I N S T A N C E
+ ;
+
+INT
+ : I N T
+ ;
+
+INT1
+ : I N T '1'
+ ;
+
+INT2
+ : I N T '2'
+ ;
+
+INT3
+ : I N T '3'
+ ;
+
+INT4
+ : I N T '4'
+ ;
+
+INT8
+ : I N T '8'
+ ;
+
+INTEGER
+ : I N T E G E R
+ ;
+
+INTERVAL
+ : I N T E R V A L
+ ;
+
+INTO
+ : I N T O
+ ;
+
+INVISIBLE
+ : I N V I S I B L E
+ ;
+
+INVOKER
+ : I N V O K E R
+ ;
+
+IO
+ : I O
+ ;
+
+IO_AFTER_GTIDS
+ : I O UL_ A F T E R UL_ G T I D S
+ ;
+
+IO_BEFORE_GTIDS
+ : I O UL_ B E F O R E UL_ G T I D S
+ ;
+
+IO_THREAD // synonym: re-emitted as a RELAY_THREAD token via ANTLR type()
+    : I O UL_ T H R E A D -> type(RELAY_THREAD)
+    ;
+
+IPC
+ : I P C
+ ;
+
+IS
+ : I S
+ ;
+
+ISOLATION
+ : I S O L A T I O N
+ ;
+
+ISSUER
+ : I S S U E R
+ ;
+
+ITERATE
+ : I T E R A T E
+ ;
+
+JOIN
+ : J O I N
+ ;
+
+JSON
+ : J S O N
+ ;
+
+JSON_TABLE
+ : J S O N UL_ T A B L E
+ ;
+
+JSON_VALUE
+ : J S O N UL_ V A L U E
+ ;
+
+KEY
+ : K E Y
+ ;
+
+KEYS
+ : K E Y S
+ ;
+
+KEY_BLOCK_SIZE
+ : K E Y UL_ B L O C K UL_ S I Z E
+ ;
+
+KILL
+ : K I L L
+ ;
+
+LAG
+ : L A G
+ ;
+
+LANGUAGE
+ : L A N G U A G E
+ ;
+
+LAST
+ : L A S T
+ ;
+
+LAST_VALUE
+ : L A S T UL_ V A L U E
+ ;
+
+LATERAL
+ : L A T E R A L
+ ;
+
+LEAD
+ : L E A D
+ ;
+
+LEADING
+ : L E A D I N G
+ ;
+
+LEAVE
+ : L E A V E
+ ;
+
+LEAVES
+ : L E A V E S
+ ;
+
+LEFT
+ : L E F T
+ ;
+
+LESS
+ : L E S S
+ ;
+
+LEVEL
+ : L E V E L
+ ;
+
+LIKE
+ : L I K E
+ ;
+
+LIMIT
+ : L I M I T
+ ;
+
+LINEAR
+ : L I N E A R
+ ;
+
+LINES
+ : L I N E S
+ ;
+
+LINESTRING
+ : L I N E S T R I N G
+ ;
+
+LIST
+ : L I S T
+ ;
+
+LOAD
+ : L O A D
+ ;
+
+LOCAL
+ : L O C A L
+ ;
+
+LOCALTIME
+ : L O C A L T I M E
+ ;
+
+LOCALTIMESTAMP
+ : L O C A L T I M E S T A M P
+ ;
+
+LOCK
+ : L O C K
+ ;
+
+LOCKED
+ : L O C K E D
+ ;
+
+LOCKS
+ : L O C K S
+ ;
+
+LOGFILE
+ : L O G F I L E
+ ;
+
+LOGS
+ : L O G S
+ ;
+
+LONG
+ : L O N G
+ ;
+
+LONGBLOB
+ : L O N G B L O B
+ ;
+
+LONGTEXT
+ : L O N G T E X T
+ ;
+
+LONG_CHAR_VARYING
+ : LONG ' ' CHAR ' ' VARYING
+ ;
+
+LONG_VARCHAR
+ : LONG ' ' VARCHAR
+ ;
+
+LOOP
+ : L O O P
+ ;
+
+LOW_PRIORITY
+ : L O W UL_ P R I O R I T Y
+ ;
+
+MASTER
+ : M A S T E R
+ ;
+
+MASTER_AUTO_POSITION
+ : M A S T E R UL_ A U T O UL_ P O S I T I O N
+ ;
+
+MASTER_BIND
+ : M A S T E R UL_ B I N D
+ ;
+
+MASTER_COMPRESSION_ALGORITHM // NOTE(review): lexeme ends in S (MASTER_COMPRESSION_ALGORITHMS, as MySQL spells this replication option) while the token name is singular -- confirm the name/lexeme mismatch is intentional
+    : M A S T E R UL_ C O M P R E S S I O N UL_ A L G O R I T H M S
+    ;
+
+MASTER_CONNECT_RETRY
+ : M A S T E R UL_ C O N N E C T UL_ R E T R Y
+ ;
+
+MASTER_DELAY
+ : M A S T E R UL_ D E L A Y
+ ;
+
+MASTER_HEARTBEAT_PERIOD
+ : M A S T E R UL_ H E A R T B E A T UL_ P E R I O D
+ ;
+
+MASTER_HOST
+ : M A S T E R UL_ H O S T
+ ;
+
+MASTER_LOG_FILE
+ : M A S T E R UL_ L O G UL_ F I L E
+ ;
+
+MASTER_LOG_POS
+ : M A S T E R UL_ L O G UL_ P O S
+ ;
+
+MASTER_PASSWORD
+ : M A S T E R UL_ P A S S W O R D
+ ;
+
+MASTER_PORT
+ : M A S T E R UL_ P O R T
+ ;
+
+MASTER_PUBLIC_KEY_PATH
+ : M A S T E R UL_ P U B L I C UL_ K E Y UL_ P A T H
+ ;
+
+MASTER_RETRY_COUNT
+ : M A S T E R UL_ R E T R Y UL_ C O U N T
+ ;
+
+MASTER_SERVER_ID
+ : M A S T E R UL_ S E R V E R UL_ I D
+ ;
+
+MASTER_SSL
+ : M A S T E R UL_ S S L
+ ;
+
+MASTER_SSL_CA
+ : M A S T E R UL_ S S L UL_ C A
+ ;
+
+MASTER_SSL_CAPATH
+ : M A S T E R UL_ S S L UL_ C A P A T H
+ ;
+
+MASTER_SSL_CERT
+ : M A S T E R UL_ S S L UL_ C E R T
+ ;
+
+MASTER_SSL_CIPHER
+ : M A S T E R UL_ S S L UL_ C I P H E R
+ ;
+
+MASTER_SSL_CRL
+ : M A S T E R UL_ S S L UL_ C R L
+ ;
+
+MASTER_SSL_CRLPATH
+ : M A S T E R UL_ S S L UL_ C R L P A T H
+ ;
+
+MASTER_SSL_KEY
+ : M A S T E R UL_ S S L UL_ K E Y
+ ;
+
+MASTER_SSL_VERIFY_SERVER_CERT
+ : M A S T E R UL_ S S L UL_ V E R I F Y UL_ S E R V E R UL_ C E R T
+ ;
+
+MASTER_TLS_CIPHERSUITES
+ : M A S T E R UL_ T L S UL_ C I P H E R S U I T E S
+ ;
+
+MASTER_TLS_VERSION
+ : M A S T E R UL_ T L S UL_ V E R S I O N
+ ;
+
+MASTER_USER
+ : M A S T E R UL_ U S E R
+ ;
+
+MASTER_ZSTD_COMPRESSION_LEVEL
+ : M A S T E R UL_ Z S T D UL_ C O M P R E S S I O N UL_ L E V E L
+ ;
+
+MATCH
+ : M A T C H
+ ;
+
+MAXVALUE
+ : M A X V A L U E
+ ;
+
+MAX_CONNECTIONS_PER_HOUR
+ : M A X UL_ C O N N E C T I O N S UL_ P E R UL_ H O U R
+ ;
+
+MAX_QUERIES_PER_HOUR
+ : M A X UL_ Q U E R I E S UL_ P E R UL_ H O U R
+ ;
+
+MAX_ROWS
+ : M A X UL_ R O W S
+ ;
+
+MAX_SIZE
+ : M A X UL_ S I Z E
+ ;
+
+MAX_UPDATES_PER_HOUR
+ : M A X UL_ U P D A T E S UL_ P E R UL_ H O U R
+ ;
+
+MAX_USER_CONNECTIONS
+ : M A X UL_ U S E R UL_ C O N N E C T I O N S
+ ;
+
+MEDIUM
+ : M E D I U M
+ ;
+
+MEDIUMBLOB
+ : M E D I U M B L O B
+ ;
+
+MEDIUMINT
+ : M E D I U M I N T
+ ;
+
+MEDIUMTEXT
+ : M E D I U M T E X T
+ ;
+
+MEMBER
+ : M E M B E R
+ ;
+
+MEMORY
+ : M E M O R Y
+ ;
+
+MERGE
+ : M E R G E
+ ;
+
+MESSAGE_TEXT
+ : M E S S A G E UL_ T E X T
+ ;
+
+MICROSECOND
+ : M I C R O S E C O N D
+ ;
+
+MIDDLEINT
+ : M I D D L E I N T
+ ;
+
+MIGRATE
+ : M I G R A T E
+ ;
+
+MINUTE
+ : M I N U T E
+ ;
+
+MINUTE_MICROSECOND
+ : M I N U T E UL_ M I C R O S E C O N D
+ ;
+
+MINUTE_SECOND
+ : M I N U T E UL_ S E C O N D
+ ;
+
+MIN_ROWS
+ : M I N UL_ R O W S
+ ;
+
+MOD
+ : M O D
+ ;
+
+MODE
+ : M O D E
+ ;
+
+MODIFIES
+ : M O D I F I E S
+ ;
+
+MODIFY
+ : M O D I F Y
+ ;
+
+MONTH
+ : M O N T H
+ ;
+
+MULTILINESTRING
+ : M U L T I L I N E S T R I N G
+ ;
+
+MULTIPOINT
+ : M U L T I P O I N T
+ ;
+
+MULTIPOLYGON
+ : M U L T I P O L Y G O N
+ ;
+
+MUTEX
+ : M U T E X
+ ;
+
+MYSQL_ERRNO
+ : M Y S Q L UL_ E R R N O
+ ;
+
+NAME
+ : N A M E
+ ;
+
+NAMES
+ : N A M E S
+ ;
+
+NATIONAL
+ : N A T I O N A L
+ ;
+
+NATIONAL_CHAR
+ : NATIONAL ' ' CHAR
+ ;
+
+NATIONAL_CHAR_VARYING
+ : NATIONAL ' ' CHAR_VARYING
+ ;
+
+NATURAL
+ : N A T U R A L
+ ;
+
+NCHAR
+ : N C H A R
+ ;
+
+NDB // synonym: re-emitted as an NDBCLUSTER token via ANTLR type()
+    : N D B -> type(NDBCLUSTER)
+    ;
+
+NDBCLUSTER
+ : N D B C L U S T E R
+ ;
+
+NESTED
+ : N E S T E D
+ ;
+
+NETWORK_NAMESPACE
+ : N E T W O R K UL_ N A M E S P A C E
+ ;
+
+NEVER
+ : N E V E R
+ ;
+
+NEW
+ : N E W
+ ;
+
+NEXT
+ : N E X T
+ ;
+
+NO
+ : N O
+ ;
+
+NODEGROUP
+ : N O D E G R O U P
+ ;
+
+NONE
+ : N O N E
+ ;
+
+SHARED
+ : S H A R E D
+ ;
+
+EXCLUSIVE
+ : E X C L U S I V E
+ ;
+
+NOT
+ : N O T
+ ;
+
+NOWAIT
+ : N O W A I T
+ ;
+
+NO_WAIT
+ : N O UL_ W A I T
+ ;
+
+NO_WRITE_TO_BINLOG
+ : N O UL_ W R I T E UL_ T O UL_ B I N L O G
+ ;
+
+NTH_VALUE
+ : N T H UL_ V A L U E
+ ;
+
+NTILE
+ : N T I L E
+ ;
+
+NULL
+ : N U L L
+ ;
+
+NULLS
+ : N U L L S
+ ;
+
+NUMBER
+ : N U M B E R
+ ;
+
+NUMERIC
+ : N U M E R I C
+ ;
+
+NVARCHAR
+ : N V A R C H A R
+ ;
+
+OF
+ : O F
+ ;
+
+OFF
+ : O F F
+ ;
+
+OFFSET
+ : O F F S E T
+ ;
+
+OJ
+ : O J
+ ;
+
+OLD
+ : O L D
+ ;
+
+ON
+ : O N
+ ;
+
+ONE
+ : O N E
+ ;
+
+ONLY
+ : O N L Y
+ ;
+
+OPEN
+ : O P E N
+ ;
+
+OPTIMIZE
+ : O P T I M I Z E
+ ;
+
+OPTIMIZER_COSTS
+ : O P T I M I Z E R UL_ C O S T S
+ ;
+
+OPTION
+ : O P T I O N
+ ;
+
+OPTIONAL
+ : O P T I O N A L
+ ;
+
+OPTIONALLY
+ : O P T I O N A L L Y
+ ;
+
+OPTIONS
+ : O P T I O N S
+ ;
+
+OR
+ : O R
+ ;
+
+ORDER
+ : O R D E R
+ ;
+
+ORDINALITY
+ : O R D I N A L I T Y
+ ;
+
+ORGANIZATION
+ : O R G A N I Z A T I O N
+ ;
+
+OTHERS
+ : O T H E R S
+ ;
+
+OUT
+ : O U T
+ ;
+
+OUTER
+ : O U T E R
+ ;
+
+OUTFILE
+ : O U T F I L E
+ ;
+
+OVER
+ : O V E R
+ ;
+
+OWNER
+ : O W N E R
+ ;
+
+PACK_KEYS
+ : P A C K UL_ K E Y S
+ ;
+
+PAGE
+ : P A G E
+ ;
+
+PARSER
+ : P A R S E R
+ ;
+
+PARTIAL
+ : P A R T I A L
+ ;
+
+PARTITION
+ : P A R T I T I O N
+ ;
+
+PARTITIONING
+ : P A R T I T I O N I N G
+ ;
+
+PARTITIONS
+ : P A R T I T I O N S
+ ;
+
+PASSWORD
+ : P A S S W O R D
+ ;
+
+PASSWORD_LOCK_TIME
+ : P A S S W O R D UL_ L O C K UL_ T I M E
+ ;
+
+PATH
+ : P A T H
+ ;
+
+PERCENT_RANK
+ : P E R C E N T UL_ R A N K
+ ;
+
+PERSIST
+ : P E R S I S T
+ ;
+
+PERSIST_ONLY
+ : P E R S I S T UL_ O N L Y
+ ;
+
+PHASE
+ : P H A S E
+ ;
+
+PLUGIN
+ : P L U G I N
+ ;
+
+PLUGINS
+ : P L U G I N S
+ ;
+
+PLUGIN_DIR
+ : P L U G I N UL_ D I R
+ ;
+
+POINT
+ : P O I N T
+ ;
+
+POLYGON
+ : P O L Y G O N
+ ;
+
+PORT
+ : P O R T
+ ;
+
+PRECEDES
+ : P R E C E D E S
+ ;
+
+PRECEDING
+ : P R E C E D I N G
+ ;
+
+PRECISION
+ : P R E C I S I O N
+ ;
+
+PREPARE
+ : P R E P A R E
+ ;
+
+PRESERVE
+ : P R E S E R V E
+ ;
+
+PREV
+ : P R E V
+ ;
+
+PRIMARY
+ : P R I M A R Y
+ ;
+
+PRIVILEGES
+ : P R I V I L E G E S
+ ;
+
+PRIVILEGE_CHECKS_USER
+ : P R I V I L E G E UL_ C H E C K S UL_ U S E R
+ ;
+
+PROCEDURE
+ : P R O C E D U R E
+ ;
+
+PROCESS
+ : P R O C E S S
+ ;
+
+PROCESSLIST
+ : P R O C E S S L I S T
+ ;
+
+PROFILE
+ : P R O F I L E
+ ;
+
+PROFILES
+ : P R O F I L E S
+ ;
+
+PROXY
+ : P R O X Y
+ ;
+
+PURGE
+ : P U R G E
+ ;
+
+QUARTER
+ : Q U A R T E R
+ ;
+
+QUERY
+ : Q U E R Y
+ ;
+
+QUICK
+ : Q U I C K
+ ;
+
+RANDOM
+ : R A N D O M
+ ;
+
+RANGE
+ : R A N G E
+ ;
+
+RANK
+ : R A N K
+ ;
+
+READ
+ : R E A D
+ ;
+
+READS
+ : R E A D S
+ ;
+
+READ_ONLY
+ : R E A D UL_ O N L Y
+ ;
+
+READ_WRITE
+ : R E A D UL_ W R I T E
+ ;
+
+REAL
+ : R E A L
+ ;
+
+REBUILD
+ : R E B U I L D
+ ;
+
+RECOVER
+ : R E C O V E R
+ ;
+
+RECURSIVE
+ : R E C U R S I V E
+ ;
+
+REDO_BUFFER_SIZE
+ : R E D O UL_ B U F F E R UL_ S I Z E
+ ;
+
+REDUNDANT
+ : R E D U N D A N T
+ ;
+
+REFERENCE
+ : R E F E R E N C E
+ ;
+
+REFERENCES
+ : R E F E R E N C E S
+ ;
+
+REGEXP
+ : R E G E X P
+ ;
+
+RELAY
+ : R E L A Y
+ ;
+
+RELAYLOG
+ : R E L A Y L O G
+ ;
+
+RELAY_LOG_FILE
+ : R E L A Y UL_ L O G UL_ F I L E
+ ;
+
+RELAY_LOG_POS
+ : R E L A Y UL_ L O G UL_ P O S
+ ;
+
+RELAY_THREAD
+ : R E L A Y UL_ T H R E A D
+ ;
+
+RELEASE
+ : R E L E A S E
+ ;
+
+RELOAD
+ : R E L O A D
+ ;
+
+REMOVE
+ : R E M O V E
+ ;
+
+RENAME
+ : R E N A M E
+ ;
+
+REORGANIZE
+ : R E O R G A N I Z E
+ ;
+
+REPAIR
+ : R E P A I R
+ ;
+
+REPEAT
+ : R E P E A T
+ ;
+
+REPEATABLE
+ : R E P E A T A B L E
+ ;
+
+REPLACE
+ : R E P L A C E
+ ;
+
+REPLICA
+ : R E P L I C A
+ ;
+
+REPLICAS
+ : R E P L I C A S
+ ;
+
+REPLICATE_DO_DB
+ : R E P L I C A T E UL_ D O UL_ D B
+ ;
+
+REPLICATE_DO_TABLE
+ : R E P L I C A T E UL_ D O UL_ T A B L E
+ ;
+
+REPLICATE_IGNORE_DB
+ : R E P L I C A T E UL_ I G N O R E UL_ D B
+ ;
+
+REPLICATE_IGNORE_TABLE
+ : R E P L I C A T E UL_ I G N O R E UL_ T A B L E
+ ;
+
+REPLICATE_REWRITE_DB
+ : R E P L I C A T E UL_ R E W R I T E UL_ D B
+ ;
+
+REPLICATE_WILD_DO_TABLE
+ : R E P L I C A T E UL_ W I L D UL_ D O UL_ T A B L E
+ ;
+
+REPLICATE_WILD_IGNORE_TABLE
+ : R E P L I C A T E UL_ W I L D UL_ I G N O R E UL_ T A B L E
+ ;
+
+REPLICATION
+ : R E P L I C A T I O N
+ ;
+
+REQUIRE
+ : R E Q U I R E
+ ;
+
+REQUIRE_ROW_FORMAT
+ : R E Q U I R E UL_ R O W UL_ F O R M A T
+ ;
+
+REQUIRE_TABLE_PRIMARY_KEY_CHECK
+ : R E Q U I R E UL_ T A B L E UL_ P R I M A R Y UL_ K E Y UL_ C H E C K
+ ;
+
+RESET
+ : R E S E T
+ ;
+
+RESIGNAL
+ : R E S I G N A L
+ ;
+
+RESOURCE
+ : R E S O U R C E
+ ;
+
+RESPECT
+ : R E S P E C T
+ ;
+
+RESTART
+ : R E S T A R T
+ ;
+
+RESTORE
+ : R E S T O R E
+ ;
+
+RESTRICT
+ : R E S T R I C T
+ ;
+
+RESUME
+ : R E S U M E
+ ;
+
+RETAIN
+ : R E T A I N
+ ;
+
+RETURN
+ : R E T U R N
+ ;
+
+RETURNED_SQLSTATE
+ : R E T U R N E D UL_ S Q L S T A T E
+ ;
+
+RETURNING
+ : R E T U R N I N G
+ ;
+
+RETURNS
+ : R E T U R N S
+ ;
+
+REUSE
+ : R E U S E
+ ;
+
+REVERSE
+ : R E V E R S E
+ ;
+
+REVOKE
+ : R E V O K E
+ ;
+
+RIGHT
+ : R I G H T
+ ;
+
+RLIKE
+ : R L I K E
+ ;
+
+ROLE
+ : R O L E
+ ;
+
+ROLLBACK
+ : R O L L B A C K
+ ;
+
+ROLLUP
+ : R O L L U P
+ ;
+
+ROTATE
+ : R O T A T E
+ ;
+
+ROUTINE
+ : R O U T I N E
+ ;
+
+ROW
+ : R O W
+ ;
+
+ROWS
+ : R O W S
+ ;
+
+ROW_COUNT
+ : R O W UL_ C O U N T
+ ;
+
+ROW_FORMAT
+ : R O W UL_ F O R M A T
+ ;
+
+ROW_NUMBER
+ : R O W UL_ N U M B E R
+ ;
+
+RTREE
+ : R T R E E
+ ;
+
+SAVEPOINT
+ : S A V E P O I N T
+ ;
+
+SCHEDULE
+ : S C H E D U L E
+ ;
+
+SCHEMA
+ : S C H E M A
+ ;
+
+SCHEMAS
+ : S C H E M A S
+ ;
+
+SCHEMA_NAME
+ : S C H E M A UL_ N A M E
+ ;
+
+SECOND
+ : S E C O N D
+ ;
+
+SECONDARY
+ : S E C O N D A R Y
+ ;
+
+SECONDARY_ENGINE
+ : S E C O N D A R Y UL_ E N G I N E
+ ;
+
+SECONDARY_ENGINE_ATTRIBUTE
+ : S E C O N D A R Y UL_ E N G I N E UL_ A T T R I B U T E
+ ;
+
+SECONDARY_LOAD
+ : S E C O N D A R Y UL_ L O A D
+ ;
+
+SECONDARY_UNLOAD
+ : S E C O N D A R Y UL_ U N L O A D
+ ;
+
+SECOND_MICROSECOND
+ : S E C O N D UL_ M I C R O S E C O N D
+ ;
+
+SECURITY
+ : S E C U R I T Y
+ ;
+
+SELECT
+ : S E L E C T
+ ;
+
+SENSITIVE
+ : S E N S I T I V E
+ ;
+
+SEPARATOR
+ : S E P A R A T O R
+ ;
+
+SERIAL
+ : S E R I A L
+ ;
+
+SERIALIZABLE
+ : S E R I A L I Z A B L E
+ ;
+
+SERVER
+ : S E R V E R
+ ;
+
+SESSION
+ : S E S S I O N
+ ;
+
+SET
+ : S E T
+ ;
+
+SHARE
+ : S H A R E
+ ;
+
+SHOW
+ : S H O W
+ ;
+
+SHUTDOWN
+ : S H U T D O W N
+ ;
+
+SIGNAL
+ : S I G N A L
+ ;
+
+SIGNED
+ : S I G N E D
+ ;
+
+SIGNED_INT
+ : SIGNED ' ' INT
+ ;
+
+SIGNED_INTEGER
+ : SIGNED ' ' INTEGER
+ ;
+
+SIMPLE
+ : S I M P L E
+ ;
+
+SKIP_SYMBOL // matches the word SKIP; named SKIP_SYMBOL presumably to avoid a clash with the reserved word 'skip' in ANTLR/generated code -- confirm
+    : S K I P
+    ;
+
+SLAVE
+ : S L A V E
+ ;
+
+SLOW
+ : S L O W
+ ;
+
+SMALLINT
+ : S M A L L I N T
+ ;
+
+SNAPSHOT
+ : S N A P S H O T
+ ;
+
+SOCKET
+ : S O C K E T
+ ;
+
+SOME // synonym: re-emitted as an ANY token via ANTLR type()
+    : S O M E -> type(ANY)
+    ;
+
+SONAME
+ : S O N A M E
+ ;
+
+SOUNDS
+ : S O U N D S
+ ;
+
+SOURCE
+ : S O U R C E
+ ;
+
+SPATIAL
+ : S P A T I A L
+ ;
+
+SPECIFIC
+ : S P E C I F I C
+ ;
+
+SQL
+ : S Q L
+ ;
+
+SQLEXCEPTION
+ : S Q L E X C E P T I O N
+ ;
+
+SQLSTATE
+ : S Q L S T A T E
+ ;
+
+SQLWARNING
+ : S Q L W A R N I N G
+ ;
+
+SQL_AFTER_GTIDS
+ : S Q L UL_ A F T E R UL_ G T I D S
+ ;
+
+SQL_AFTER_MTS_GAPS
+ : S Q L UL_ A F T E R UL_ M T S UL_ G A P S
+ ;
+
+SQL_BEFORE_GTIDS
+ : S Q L UL_ B E F O R E UL_ G T I D S
+ ;
+
+SQL_BIG_RESULT
+ : S Q L UL_ B I G UL_ R E S U L T
+ ;
+
+SQL_BUFFER_RESULT
+ : S Q L UL_ B U F F E R UL_ R E S U L T
+ ;
+
+SQL_CALC_FOUND_ROWS
+ : S Q L UL_ C A L C UL_ F O U N D UL_ R O W S
+ ;
+
+SQL_NO_CACHE
+ : S Q L UL_ N O UL_ C A C H E
+ ;
+
+SQL_SMALL_RESULT
+ : S Q L UL_ S M A L L UL_ R E S U L T
+ ;
+
+SQL_THREAD
+ : S Q L UL_ T H R E A D
+ ;
+
+SQL_TSI_DAY
+ : S Q L UL_ T S I UL_ D A Y -> type(DAY)
+ ;
+
+SQL_TSI_HOUR
+ : S Q L UL_ T S I UL_ H O U R -> type(HOUR)
+ ;
+
+SQL_TSI_MINUTE
+ : S Q L UL_ T S I UL_ M I N U T E -> type(MINUTE)
+ ;
+
+SQL_TSI_MONTH
+ : S Q L UL_ T S I UL_ M O N T H -> type(MONTH)
+ ;
+
+SQL_TSI_QUARTER
+ : S Q L UL_ T S I UL_ Q U A R T E R -> type(QUARTER)
+ ;
+
+SQL_TSI_SECOND
+ : S Q L UL_ T S I UL_ S E C O N D -> type(SECOND)
+ ;
+
+SQL_TSI_WEEK
+ : S Q L UL_ T S I UL_ W E E K -> type(WEEK)
+ ;
+
+SQL_TSI_YEAR
+ : S Q L UL_ T S I UL_ Y E A R -> type(YEAR)
+ ;
+
+SRID
+ : S R I D
+ ;
+
+SSL
+ : S S L
+ ;
+
+STACKED
+ : S T A C K E D
+ ;
+
+START
+ : S T A R T
+ ;
+
+STARTING
+ : S T A R T I N G
+ ;
+
+STARTS
+ : S T A R T S
+ ;
+
+STATS_AUTO_RECALC
+ : S T A T S UL_ A U T O UL_ R E C A L C
+ ;
+
+STATS_PERSISTENT
+ : S T A T S UL_ P E R S I S T E N T
+ ;
+
+STATS_SAMPLE_PAGES
+ : S T A T S UL_ S A M P L E UL_ P A G E S
+ ;
+
+STATUS
+ : S T A T U S
+ ;
+
+STOP
+ : S T O P
+ ;
+
+STORAGE
+ : S T O R A G E
+ ;
+
+STORED
+ : S T O R E D
+ ;
+
+STRAIGHT_JOIN
+ : S T R A I G H T UL_ J O I N
+ ;
+
+STREAM
+ : S T R E A M
+ ;
+
+STRING
+ : S T R I N G
+ ;
+
+SUBCLASS_ORIGIN
+ : S U B C L A S S UL_ O R I G I N
+ ;
+
+SUBJECT
+ : S U B J E C T
+ ;
+
+SUBPARTITION
+ : S U B P A R T I T I O N
+ ;
+
+SUBPARTITIONS
+ : S U B P A R T I T I O N S
+ ;
+
+SUPER
+ : S U P E R
+ ;
+
+SUSPEND
+ : S U S P E N D
+ ;
+
+SWAPS
+ : S W A P S
+ ;
+
+SWITCHES
+ : S W I T C H E S
+ ;
+
+SYSTEM
+ : S Y S T E M
+ ;
+
+SOURCE_BIND
+ : S O U R C E UL_ B I N D
+ ;
+
+SOURCE_HOST
+ : S O U R C E UL_ H O S T
+ ;
+
+SOURCE_USER
+ : S O U R C E UL_ U S E R
+ ;
+
+SOURCE_PASSWORD
+ : S O U R C E UL_ P A S S W O R D
+ ;
+
+SOURCE_PORT
+ : S O U R C E UL_ P O R T
+ ;
+
+SOURCE_LOG_FILE
+ : S O U R C E UL_ L O G UL_ F I L E
+ ;
+
+SOURCE_LOG_POS
+ : S O U R C E UL_ L O G UL_ P O S
+ ;
+
+SOURCE_AUTO_POSITION
+ : S O U R C E UL_ A U T O UL_ P O S I T I O N
+ ;
+
+SOURCE_HEARTBEAT_PERIOD
+ : S O U R C E UL_ H E A R T B E A T UL_ P E R I O D
+ ;
+
+SOURCE_CONNECT_RETRY
+ : S O U R C E UL_ C O N N E C T UL_ R E T R Y
+ ;
+
+SOURCE_RETRY_COUNT
+ : S O U R C E UL_ R E T R Y UL_ C O U N T
+ ;
+
+SOURCE_CONNECTION_AUTO_FAILOVER
+ : S O U R C E UL_ C O N N E C T I O N UL_ A U T O UL_ F A I L O V E R
+ ;
+
+SOURCE_DELAY
+ : S O U R C E UL_ D E L A Y
+ ;
+
+SOURCE_COMPRESSION_ALGORITHMS
+ : S O U R C E UL_ C O M P R E S S I O N UL_ A L G O R I T H M S
+ ;
+
+SOURCE_ZSTD_COMPRESSION_LEVEL
+ : S O U R C E UL_ Z S T D UL_ C O M P R E S S I O N UL_ L E V E L
+ ;
+
+SOURCE_SSL
+ : S O U R C E UL_ S S L
+ ;
+
+SOURCE_SSL_CA
+ : S O U R C E UL_ S S L UL_ C A
+ ;
+
+SOURCE_SSL_CAPATH
+ : S O U R C E UL_ S S L UL_ C A P A T H
+ ;
+
+SOURCE_SSL_CERT
+ : S O U R C E UL_ S S L UL_ C E R T
+ ;
+
+SOURCE_SSL_CRL
+ : S O U R C E UL_ S S L UL_ C R L
+ ;
+
+SOURCE_SSL_CRLPATH
+ : S O U R C E UL_ S S L UL_ C R L P A T H
+ ;
+
+SOURCE_SSL_KEY
+ : S O U R C E UL_ S S L UL_ K E Y
+ ;
+
+SOURCE_SSL_CIPHER
+ : S O U R C E UL_ S S L UL_ C I P H E R
+ ;
+
+SOURCE_SSL_VERIFY_SERVER_CERT
+ : S O U R C E UL_ S S L UL_ V E R I F Y UL_ S E R V E R UL_ C E R T
+ ;
+
+SOURCE_TLS_VERSION
+ : S O U R C E UL_ T L S UL_ V E R S I O N
+ ;
+
+SOURCE_TLS_CIPHERSUITES
+ : S O U R C E UL_ T L S UL_ C I P H E R S U I T E S
+ ;
+
+SOURCE_PUBLIC_KEY_PATH
+ : S O U R C E UL_ P U B L I C UL_ K E Y UL_ P A T H
+ ;
+
+TABLE
+ : T A B L E
+ ;
+
+TABLES
+ : T A B L E S
+ ;
+
+TABLESPACE
+ : T A B L E S P A C E
+ ;
+
+TABLE_CHECKSUM
+ : T A B L E UL_ C H E C K S U M
+ ;
+
+TABLE_NAME
+ : T A B L E UL_ N A M E
+ ;
+
+TEMPORARY
+ : T E M P O R A R Y
+ ;
+
+TEMPTABLE
+ : T E M P T A B L E
+ ;
+
+TERMINATED
+ : T E R M I N A T E D
+ ;
+
+TEXT
+ : T E X T
+ ;
+
+THAN
+ : T H A N
+ ;
+
+THEN
+ : T H E N
+ ;
+
+THREAD_PRIORITY
+ : T H R E A D UL_ P R I O R I T Y
+ ;
+
+TIES
+ : T I E S
+ ;
+
+TIME
+ : T I M E
+ ;
+
+TIMESTAMP
+ : T I M E S T A M P
+ ;
+
+TIMESTAMP_ADD
+ : T I M E S T A M P UL_ A D D
+ ;
+
+TIMESTAMP_DIFF
+ : T I M E S T A M P UL_ D I F F
+ ;
+
+TINYBLOB
+ : T I N Y B L O B
+ ;
+
+TINYINT
+ : T I N Y I N T
+ ;
+
+TINYTEXT
+ : T I N Y T E X T
+ ;
+
+TLS
+ : T L S
+ ;
+
+TO
+ : T O
+ ;
+
+TRAILING
+ : T R A I L I N G
+ ;
+
+TRANSACTION
+ : T R A N S A C T I O N
+ ;
+
+TRIGGER
+ : T R I G G E R
+ ;
+
+TRIGGERS
+ : T R I G G E R S
+ ;
+
+TRUE
+ : T R U E
+ ;
+
+TRUNCATE
+ : T R U N C A T E
+ ;
+
+TYPE
+ : T Y P E
+ ;
+
+TYPES
+ : T Y P E S
+ ;
+
+UNBOUNDED
+ : U N B O U N D E D
+ ;
+
+UNCOMMITTED
+ : U N C O M M I T T E D
+ ;
+
+UNDEFINED
+ : U N D E F I N E D
+ ;
+
+UNDO
+ : U N D O
+ ;
+
+UNDOFILE
+ : U N D O F I L E
+ ;
+
+UNDO_BUFFER_SIZE
+ : U N D O UL_ B U F F E R UL_ S I Z E
+ ;
+
+UNICODE
+ : U N I C O D E
+ ;
+
+UNINSTALL
+ : U N I N S T A L L
+ ;
+
+UNION
+ : U N I O N
+ ;
+
+UNIQUE
+ : U N I Q U E
+ ;
+
+UNKNOWN
+ : U N K N O W N
+ ;
+
+UNLOCK
+ : U N L O C K
+ ;
+
+UNSIGNED
+ : U N S I G N E D
+ ;
+
+UNSIGNED_INT
+ : UNSIGNED ' ' INT
+ ;
+
+UNSIGNED_INTEGER
+ : UNSIGNED ' ' INTEGER
+ ;
+
+UNTIL
+ : U N T I L
+ ;
+
+UPDATE
+ : U P D A T E
+ ;
+
+UPGRADE
+ : U P G R A D E
+ ;
+
+USAGE
+ : U S A G E
+ ;
+
+USE
+ : U S E
+ ;
+
+USER
+ : U S E R
+ ;
+
+USER_RESOURCES
+ : U S E R UL_ R E S O U R C E S
+ ;
+
+USE_FRM
+ : U S E UL_ F R M
+ ;
+
+USING
+ : U S I N G
+ ;
+
+UTC_DATE
+ : U T C UL_ D A T E
+ ;
+
+UTC_TIME
+ : U T C UL_ T I M E
+ ;
+
+UTC_TIMESTAMP
+ : U T C UL_ T I M E S T A M P
+ ;
+
+VALIDATION
+ : V A L I D A T I O N
+ ;
+
+VALUE
+ : V A L U E
+ ;
+
+VALUES
+ : V A L U E S
+ ;
+
+VARBINARY
+ : V A R B I N A R Y
+ ;
+
+VARCHAR
+ : V A R C H A R
+ ;
+
+VARCHARACTER
+ : V A R C H A R A C T E R
+ ;
+
+VARIABLES
+ : V A R I A B L E S
+ ;
+
+VARYING
+ : V A R Y I N G
+ ;
+
+VCPU
+ : V C P U
+ ;
+
+VIEW
+ : V I E W
+ ;
+
+VIRTUAL
+ : V I R T U A L
+ ;
+
+VISIBLE
+ : V I S I B L E
+ ;
+
+WAIT
+ : W A I T
+ ;
+
+WARNINGS
+ : W A R N I N G S
+ ;
+
+WEEK
+ : W E E K
+ ;
+
+WEIGHT_STRING
+ : W E I G H T UL_ S T R I N G
+ ;
+
+WHEN
+ : W H E N
+ ;
+
+WHERE
+ : W H E R E
+ ;
+
+WHILE
+ : W H I L E
+ ;
+
+WINDOW
+ : W I N D O W
+ ;
+
+WITH
+ : W I T H
+ ;
+
+WITHOUT
+ : W I T H O U T
+ ;
+
+WORK
+ : W O R K
+ ;
+
+WRAPPER
+ : W R A P P E R
+ ;
+
+WRITE
+ : W R I T E
+ ;
+
+X509 // X is the case-insensitive letter fragment; the digits '509' are matched as a literal
+ : X '509'
+ ;
+
+XA
+ : X A
+ ;
+
+XID
+ : X I D
+ ;
+
+XML
+ : X M L
+ ;
+
+XOR
+ : X O R
+ ;
+
+YEAR
+ : Y E A R
+ ;
+
+YEAR_MONTH
+ : Y E A R UL_ M O N T H
+ ;
+
+ZEROFILL
+ : Z E R O F I L L
+ ;
+
+JSON_ARRAY
+ : J S O N UL_ A R R A Y
+ ;
+
+JSON_ARRAY_APPEND
+ : J S O N UL_ A R R A Y UL_ A P P E N D
+ ;
+
+JSON_ARRAY_INSERT
+ : J S O N UL_ A R R A Y UL_ I N S E R T
+ ;
+
+JSON_CONTAINS
+ : J S O N UL_ C O N T A I N S
+ ;
+
+JSON_CONTAINS_PATH
+ : J S O N UL_ C O N T A I N S UL_ P A T H
+ ;
+JSON_DEPTH
+ : J S O N UL_ D E P T H
+ ;
+
+JSON_EXTRACT
+ : J S O N UL_ E X T R A C T
+ ;
+
+JSON_INSERT
+ : J S O N UL_ I N S E R T
+ ;
+
+JSON_KEYS
+ : J S O N UL_ K E Y S
+ ;
+
+JSON_LENGTH
+ : J S O N UL_ L E N G T H
+ ;
+
+JSON_MERGE
+ : J S O N UL_ M E R G E
+ ;
+
+JSON_MERGE_PATCH
+ : J S O N UL_ M E R G E UL_ P A T C H
+ ;
+
+JSON_MERGE_PRESERVE
+ : J S O N UL_ M E R G E UL_ P R E S E R V E
+ ;
+
+JSON_OBJECT
+ : J S O N UL_ O B J E C T
+ ;
+
+JSON_OVERLAPS
+ : J S O N UL_ O V E R L A P S
+ ;
+
+JSON_PRETTY
+ : J S O N UL_ P R E T T Y
+ ;
+
+JSON_QUOTE
+ : J S O N UL_ Q U O T E
+ ;
+
+JSON_REMOVE
+ : J S O N UL_ R E M O V E
+ ;
+
+JSON_REPLACE
+ : J S O N UL_ R E P L A C E
+ ;
+
+JSON_SCHEMA_VALID
+ : J S O N UL_ S C H E M A UL_ V A L I D
+ ;
+
+JSON_SCHEMA_VALIDATION_REPORT
+ : J S O N UL_ S C H E M A UL_ V A L I D A T I O N UL_ R E P O R T
+ ;
+
+JSON_SEARCH
+ : J S O N UL_ S E A R C H
+ ;
+
+JSON_SET
+ : J S O N UL_ S E T
+ ;
+
+JSON_STORAGE_FREE
+ : J S O N UL_ S T O R A G E UL_ F R E E
+ ;
+
+JSON_STORAGE_SIZE
+ : J S O N UL_ S T O R A G E UL_ S I Z E
+ ;
+
+JSON_TYPE
+ : J S O N UL_ T Y P E
+ ;
+
+JSON_UNQUOTE
+ : J S O N UL_ U N Q U O T E
+ ;
+
+JSON_VALID
+ : J S O N UL_ V A L I D
+ ;
+
+ZONE
+ : Z O N E
+ ;
+
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Keyword.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Keyword.g4
new file mode 100644
index 0000000000000..c9f5c6d91c358
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Keyword.g4
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+lexer grammar Keyword;
+
+import Alphabet;
+
+WS
+ : [ \t\r\n] + ->skip
+ ;
+
+MAX
+ : M A X
+ ;
+
+MIN
+ : M I N
+ ;
+
+SUM
+ : S U M
+ ;
+
+COUNT
+ : C O U N T
+ ;
+
+GROUP_CONCAT
+ : G R O U P UL_ C O N C A T
+ ;
+
+CAST
+ : C A S T
+ ;
+
+POSITION
+ : P O S I T I O N
+ ;
+
+SUBSTRING
+ : S U B S T R I N G
+ ;
+
+SUBSTR
+ : S U B S T R
+ ;
+
+EXTRACT
+ : E X T R A C T
+ ;
+
+TRIM
+ : T R I M
+ ;
+
+LAST_DAY
+ : L A S T UL_ D A Y
+ ;
+
+TRADITIONAL
+ : T R A D I T I O N A L
+ ;
+
+TREE
+ : T R E E
+ ;
+
+MYSQL_MAIN
+ : M Y S Q L UL_ M A I N
+ ;
+
+MYSQL_ADMIN
+ : M Y S Q L UL_ A D M I N
+ ;
+
+INSTANT
+ : I N S T A N T
+ ;
+
+INPLACE
+ : I N P L A C E
+ ;
+
+COPY
+ : C O P Y
+ ;
+
+UL_BINARY
+ : UL_ B I N A R Y
+ ;
+
+AUTOCOMMIT
+ : A U T O C O M M I T
+ ;
+
+INNODB // NOTE(review): quoted literal is case-sensitive, unlike the fragment-based keywords in this grammar — confirm 'innodb' should not also match
+ : 'INNODB'
+ ;
+
+REDO_LOG
+ : 'REDO_LOG'
+ ;
+
+DELIMITER
+ : D E L I M I T E R
+ ;
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Literals.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Literals.g4
new file mode 100644
index 0000000000000..ae3814a59a5ef
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Literals.g4
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+lexer grammar Literals;
+
+import Alphabet, Symbol;
+
+FILESIZE_LITERAL
+ : INT_NUM_ ('K'|'M'|'G'|'T')
+ ;
+
+SINGLE_QUOTED_TEXT
+ : SQ_ ('\\'. | '\'\'' | ~('\'' | '\\'))* SQ_
+ ;
+
+DOUBLE_QUOTED_TEXT
+ : DQ_ ( '\\'. | '""' | ~('"'| '\\') )* DQ_
+ ;
+
+BQUOTA_STRING
+ : BQ_ ( '\\'. | '``' | ~('`'|'\\'))* BQ_
+ ;
+
+NCHAR_TEXT
+ : N SINGLE_QUOTED_TEXT
+ ;
+
+UNDERSCORE_CHARSET
+ : UL_ [a-z0-9A-Z]+
+ ;
+
+NUMBER_
+ : INT_NUM_
+ | FLOAT_NUM_
+ | DECIMAL_NUM_
+ ;
+
+INT_NUM_
+ : DIGIT+
+ ;
+
+FLOAT_NUM_
+ : INT_NUM_? DOT_? INT_NUM_ E (PLUS_ | MINUS_)? INT_NUM_
+ ;
+
+DECIMAL_NUM_
+ : INT_NUM_? DOT_ INT_NUM_
+ ;
+
+HEX_DIGIT_
+ : '0x' HEX_+ | X SQ_ HEX_+ SQ_ | X SQ_ + SQ_
+ ;
+
+BIT_NUM_
+ : '0b' ('0' | '1')+ | B SQ_ ('0' | '1')+ SQ_
+ ;
+
+IDENTIFIER_
+ : [A-Za-z_$0-9\u0080-\uFFFF]*?[A-Za-z_$\u0080-\uFFFF]+?[A-Za-z_$0-9\u0080-\uFFFF]*
+ | BQ_ ~'`'+ BQ_
+ ;
+
+IP_ADDRESS
+ : INT_NUM_ DOT_ INT_NUM_ DOT_ INT_NUM_ DOT_ INT_NUM_
+ ;
+
+NOT_SUPPORT_
+ : 'not support'
+ ;
+
+fragment DIGIT
+ : [0-9]
+ ;
+
+fragment HEX_
+ : [0-9a-fA-F]
+ ;
diff --git a/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Symbol.g4 b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Symbol.g4
new file mode 100644
index 0000000000000..ab8af14019271
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/imports/hive/Symbol.g4
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+lexer grammar Symbol;
+
+AND_: '&&';
+OR_: '||';
+NOT_: '!';
+TILDE_: '~';
+VERTICAL_BAR_: '|';
+AMPERSAND_: '&';
+SIGNED_LEFT_SHIFT_: '<<';
+SIGNED_RIGHT_SHIFT_: '>>';
+CARET_: '^';
+MOD_: '%';
+COLON_: ':';
+PLUS_: '+';
+MINUS_: '-';
+ASTERISK_: '*';
+SLASH_: '/';
+BACKSLASH_: '\\';
+DOT_: '.';
+DOT_ASTERISK_: '.*';
+SAFE_EQ_: '<=>';
+DEQ_: '==';
+EQ_: '=';
+NEQ_: '<>' | '!=';
+GT_: '>';
+GTE_: '>=';
+LT_: '<';
+LTE_: '<=';
+POUND_: '#';
+LP_: '(';
+RP_: ')';
+LBE_: '{';
+RBE_: '}';
+LBT_: '[';
+RBT_: ']';
+COMMA_: ',';
+DQ_: '"';
+SQ_ : '\'';
+BQ_: '`';
+QUESTION_: '?';
+AT_: '@';
+SEMI_: ';';
+ASSIGNMENT_: ':=';
+JSON_SEPARATOR: '->';
+JSON_UNQUOTED_SEPARATOR: '->>';
diff --git a/parser/sql/dialect/hive/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/HiveStatement.g4 b/parser/sql/dialect/hive/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/HiveStatement.g4
new file mode 100644
index 0000000000000..f37d6c71d0def
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/HiveStatement.g4
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+grammar HiveStatement;
+
+import Comments, DMLStatement;
+
+// TODO correct hive SQL parsing according to official documentation
+execute
+ : (select
+ | insert
+ | update
+ | delete
+ ) (SEMI_ EOF? | EOF)
+ | EOF
+ ;
diff --git a/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveLexer.java b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveLexer.java
new file mode 100644
index 0000000000000..3badfcab97ba4
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveLexer.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.hive.parser;
+
+import org.antlr.v4.runtime.CharStream;
+import org.apache.shardingsphere.sql.parser.api.parser.SQLLexer;
+
+/**
+ * SQL lexer for hive.
+ */
+public final class HiveLexer extends HiveStatementLexer implements SQLLexer { // adapter: exposes the generated HiveStatementLexer through ShardingSphere's SQLLexer SPI
+
+ public HiveLexer(final CharStream input) { // input: character stream of the SQL text to tokenize
+ super(input);
+ }
+}
diff --git a/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveParser.java b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveParser.java
new file mode 100644
index 0000000000000..86310918e730b
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveParser.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.hive.parser;
+
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.TokenStream;
+import org.apache.shardingsphere.sql.parser.api.ASTNode;
+import org.apache.shardingsphere.sql.parser.api.parser.SQLParser;
+import org.apache.shardingsphere.sql.parser.core.ParseASTNode;
+
+/**
+ * SQL parser for hive.
+ */
+public final class HiveParser extends HiveStatementParser implements SQLParser { // adapter: exposes the generated HiveStatementParser through ShardingSphere's SQLParser SPI
+
+ public HiveParser(final TokenStream input) { // input: token stream produced by HiveLexer
+ super(input);
+ }
+
+ @Override
+ public ASTNode parse() { // 'execute' is the grammar's entry rule; the token stream is kept alongside the parse tree in ParseASTNode
+ return new ParseASTNode(execute(), (CommonTokenStream) getTokenStream());
+ }
+}
diff --git a/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveParserFacade.java b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveParserFacade.java
new file mode 100644
index 0000000000000..44e9317aa0b1f
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/parser/HiveParserFacade.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.hive.parser;
+
+import org.apache.shardingsphere.sql.parser.api.parser.SQLLexer;
+import org.apache.shardingsphere.sql.parser.api.parser.SQLParser;
+import org.apache.shardingsphere.sql.parser.spi.DialectSQLParserFacade;
+
+/**
+ * SQL parser facade for hive.
+ */
+public final class HiveParserFacade implements DialectSQLParserFacade {
+
+    // Upper-bounded wildcard restored: the scraped patch had dropped the '<?' from both
+    // signatures, leaving invalid Java ('Class extends SQLLexer>').
+    @Override
+    public Class<? extends SQLLexer> getLexerClass() {
+        return HiveLexer.class;
+    }
+
+    @Override
+    public Class<? extends SQLParser> getParserClass() {
+        return HiveParser.class;
+    }
+
+    // SPI key: must match the database type name registered for Hive.
+    @Override
+    public String getDatabaseType() {
+        return "Hive";
+    }
+}
diff --git a/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/HiveStatementVisitor.java b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/HiveStatementVisitor.java
new file mode 100644
index 0000000000000..8e08270908442
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/HiveStatementVisitor.java
@@ -0,0 +1,523 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.hive.visitor.statement;
+
+import lombok.AccessLevel;
+import lombok.Getter;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.misc.Interval;
+import org.apache.shardingsphere.sql.parser.api.ASTNode;
+import org.apache.shardingsphere.sql.parser.sql.common.enums.ParameterMarkerType;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexNameSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CollateExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExistsSubqueryExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.CommonExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.SimpleExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubqueryExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ExpressionProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DatabaseSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.ParameterMarkerSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.util.SQLUtils;
+import org.apache.shardingsphere.sql.parser.sql.common.value.collection.CollectionValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.BooleanLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.NullLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.NumberLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.OtherLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.StringLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.parametermarker.ParameterMarkerValue;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLSelectStatement;
+
+import java.util.Collection;
+import java.util.LinkedList;
+
+/**
+ * Statement visitor for hive.
+ */
+@Getter(AccessLevel.PROTECTED)
+public abstract class HiveStatementVisitor extends HiveStatementBaseVisitor<ASTNode> {
+    
+    private final Collection<ParameterMarkerSegment> parameterMarkerSegments = new LinkedList<>();
+    
+    @Override
+    public final ASTNode visitParameterMarker(final ParameterMarkerContext ctx) {
+        return new ParameterMarkerValue(parameterMarkerSegments.size(), ParameterMarkerType.QUESTION);
+    }
+    
+    @Override
+    public final ASTNode visitLiterals(final LiteralsContext ctx) {
+        if (null != ctx.stringLiterals()) {
+            return visit(ctx.stringLiterals());
+        }
+        if (null != ctx.numberLiterals()) {
+            return visit(ctx.numberLiterals());
+        }
+        if (null != ctx.temporalLiterals()) {
+            return visit(ctx.temporalLiterals());
+        }
+        if (null != ctx.hexadecimalLiterals()) {
+            return visit(ctx.hexadecimalLiterals());
+        }
+        if (null != ctx.bitValueLiterals()) {
+            return visit(ctx.bitValueLiterals());
+        }
+        if (null != ctx.booleanLiterals()) {
+            return visit(ctx.booleanLiterals());
+        }
+        if (null != ctx.nullValueLiterals()) {
+            return visit(ctx.nullValueLiterals());
+        }
+        throw new IllegalStateException("Literals must have string, number, dateTime, hex, bit, boolean or null.");
+    }
+    
+    @Override
+    public final ASTNode visitStringLiterals(final StringLiteralsContext ctx) {
+        return new StringLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public ASTNode visitString_(final String_Context ctx) {
+        return new StringLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) {
+        return new NumberLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public ASTNode visitTemporalLiterals(final TemporalLiteralsContext ctx) {
+        // TODO deal with TemporalLiterals
+        return new OtherLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitHexadecimalLiterals(final HexadecimalLiteralsContext ctx) {
+        // TODO deal with hexadecimalLiterals
+        return new OtherLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitBitValueLiterals(final BitValueLiteralsContext ctx) {
+        // TODO deal with bitValueLiterals
+        return new OtherLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitBooleanLiterals(final BooleanLiteralsContext ctx) {
+        return new BooleanLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitNullValueLiterals(final NullValueLiteralsContext ctx) {
+        return new NullLiteralValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitIdentifier(final IdentifierContext ctx) {
+        return new IdentifierValue(ctx.getText());
+    }
+    
+    @Override
+    public final ASTNode visitSchemaName(final SchemaNameContext ctx) {
+        return new DatabaseSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
+    }
+    
+    @Override
+    public final ASTNode visitTableName(final TableNameContext ctx) {
+        SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.name().getStart().getStartIndex(),
+                ctx.name().getStop().getStopIndex(), new IdentifierValue(ctx.name().identifier().getText())));
+        OwnerContext owner = ctx.owner();
+        if (null != owner) {
+            result.setOwner((OwnerSegment) visit(owner));
+        }
+        return result;
+    }
+    
+    @Override
+    public final ASTNode visitViewName(final ViewNameContext ctx) {
+        SimpleTableSegment result = new SimpleTableSegment(new TableNameSegment(ctx.identifier().getStart().getStartIndex(),
+                ctx.identifier().getStop().getStopIndex(), new IdentifierValue(ctx.identifier().getText())));
+        OwnerContext owner = ctx.owner();
+        if (null != owner) {
+            result.setOwner((OwnerSegment) visit(owner));
+        }
+        return result;
+    }
+    
+    @Override
+    public final ASTNode visitOwner(final OwnerContext ctx) {
+        return new OwnerSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
+    }
+    
+    @Override
+    public ASTNode visitFunctionName(final FunctionNameContext ctx) {
+        FunctionSegment result = new FunctionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.identifier().IDENTIFIER_().getText(), ctx.getText());
+        if (null != ctx.owner()) {
+            result.setOwner((OwnerSegment) visit(ctx.owner()));
+        }
+        return result;
+    }
+    
+    @Override
+    public final ASTNode visitColumnName(final ColumnNameContext ctx) {
+        return new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
+    }
+    
+    @Override
+    public final ASTNode visitIndexName(final IndexNameContext ctx) {
+        IndexNameSegment indexName = new IndexNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
+        return new IndexSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), indexName);
+    }
+    
+    @Override
+    public ASTNode visitTableList(final TableListContext ctx) {
+        CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
+        for (TableNameContext each : ctx.tableName()) {
+            result.getValue().add((SimpleTableSegment) visit(each));
+        }
+        return result;
+    }
+    
+    @Override
+    public final ASTNode visitViewNames(final ViewNamesContext ctx) {
+        CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
+        for (ViewNameContext each : ctx.viewName()) {
+            result.getValue().add((SimpleTableSegment) visit(each));
+        }
+        return result;
+    }
+    
+    @Override
+    public final ASTNode visitColumnNames(final ColumnNamesContext ctx) {
+        CollectionValue<ColumnSegment> result = new CollectionValue<>();
+        for (ColumnNameContext each : ctx.columnName()) {
+            result.getValue().add((ColumnSegment) visit(each));
+        }
+        return result;
+    }
+    
+    @Override
+    public final ASTNode visitExpr(final ExprContext ctx) {
+        if (null != ctx.booleanPrimary()) {
+            return visit(ctx.booleanPrimary());
+        }
+        if (null != ctx.XOR()) {
+            return createBinaryOperationExpression(ctx, "XOR");
+        }
+        if (null != ctx.andOperator()) {
+            return createBinaryOperationExpression(ctx, ctx.andOperator().getText());
+        }
+        if (null != ctx.orOperator()) {
+            return createBinaryOperationExpression(ctx, ctx.orOperator().getText());
+        }
+        return new NotExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (ExpressionSegment) visit(ctx.expr(0)), false);
+    }
+    
+    private BinaryOperationExpression createBinaryOperationExpression(final ExprContext ctx, final String operator) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.expr(0));
+        ExpressionSegment right = (ExpressionSegment) visit(ctx.expr(1));
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    @Override
+    public final ASTNode visitBooleanPrimary(final BooleanPrimaryContext ctx) {
+        if (null != ctx.IS()) {
+            // TODO optimize operatorToken
+            String rightText = "";
+            if (null != ctx.NOT()) {
+                rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(ctx.NOT().getSymbol().getStartIndex(),
+                        ctx.NOT().getSymbol().getStopIndex()))).concat(" ");
+            }
+            Token operatorToken = null;
+            if (null != ctx.NULL()) {
+                operatorToken = ctx.NULL().getSymbol();
+            }
+            if (null != ctx.TRUE()) {
+                operatorToken = ctx.TRUE().getSymbol();
+            }
+            if (null != ctx.FALSE()) {
+                operatorToken = ctx.FALSE().getSymbol();
+            }
+            int startIndex = null == operatorToken ? ctx.IS().getSymbol().getStopIndex() + 2 : operatorToken.getStartIndex();
+            rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(startIndex, ctx.stop.getStopIndex())));
+            ExpressionSegment right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText);
+            String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+            ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
+            String operator = "IS";
+            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+        }
+        if (null != ctx.comparisonOperator() || null != ctx.SAFE_EQ_()) {
+            return createCompareSegment(ctx);
+        }
+        if (null != ctx.MEMBER()) {
+            int startIndex = ctx.MEMBER().getSymbol().getStopIndex() + 5;
+            int endIndex = ctx.stop.getStopIndex() - 1;
+            String rightText = ctx.start.getInputStream().getText(new Interval(startIndex, endIndex));
+            ExpressionSegment right = new ExpressionProjectionSegment(startIndex, endIndex, rightText);
+            String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+            ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
+            String operator = "MEMBER OF";
+            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+        }
+        if (null != ctx.assignmentOperator()) {
+            return createAssignmentSegment(ctx);
+        }
+        return visit(ctx.predicate());
+    }
+    
+    private ASTNode createAssignmentSegment(final BooleanPrimaryContext ctx) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
+        ExpressionSegment right = (ExpressionSegment) visit(ctx.predicate());
+        String operator = ctx.assignmentOperator().getText();
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary());
+        ExpressionSegment right;
+        String operator;
+        if (null != ctx.ALL()) {
+            operator = null == ctx.SAFE_EQ_() ? ctx.comparisonOperator().getText() + " ALL" : ctx.SAFE_EQ_().getText();
+        } else {
+            operator = null == ctx.SAFE_EQ_() ? ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText();
+        }
+        if (null != ctx.predicate()) {
+            right = (ExpressionSegment) visit(ctx.predicate());
+        } else {
+            right = new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (MySQLSelectStatement) visit(ctx.subquery()),
+                    getOriginalText(ctx.subquery())));
+        }
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    @Override
+    public final ASTNode visitPredicate(final PredicateContext ctx) {
+        if (null != ctx.IN()) {
+            return createInSegment(ctx);
+        }
+        if (null != ctx.BETWEEN()) {
+            return createBetweenSegment(ctx);
+        }
+        if (null != ctx.LIKE()) {
+            return createBinaryOperationExpressionFromLike(ctx);
+        }
+        if (null != ctx.REGEXP()) {
+            return createBinaryOperationExpressionFromRegexp(ctx);
+        }
+        if (null != ctx.RLIKE()) {
+            return createBinaryOperationExpressionFromRlike(ctx);
+        }
+        return visit(ctx.bitExpr(0));
+    }
+    
+    private InExpression createInSegment(final PredicateContext ctx) {
+        boolean not = null != ctx.NOT();
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
+        ExpressionSegment right;
+        if (null != ctx.subquery()) {
+            right = new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (MySQLSelectStatement) visit(ctx.subquery()),
+                    getOriginalText(ctx.subquery())));
+        } else {
+            right = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex());
+            for (ExprContext each : ctx.expr()) {
+                ((ListExpression) right).getItems().add((ExpressionSegment) visit(each));
+            }
+        }
+        return new InExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, not);
+    }
+    
+    private BinaryOperationExpression createBinaryOperationExpressionFromLike(final PredicateContext ctx) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
+        String operator;
+        ExpressionSegment right;
+        if (null != ctx.SOUNDS()) {
+            right = (ExpressionSegment) visit(ctx.bitExpr(1));
+            operator = "SOUNDS LIKE";
+        } else {
+            ListExpression listExpression = new ListExpression(ctx.simpleExpr(0).start.getStartIndex(), ctx.simpleExpr().get(ctx.simpleExpr().size() - 1).stop.getStopIndex());
+            for (SimpleExprContext each : ctx.simpleExpr()) {
+                listExpression.getItems().add((ExpressionSegment) visit(each));
+            }
+            right = listExpression;
+            operator = null == ctx.NOT() ? "LIKE" : "NOT LIKE";
+        }
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    private BinaryOperationExpression createBinaryOperationExpressionFromRegexp(final PredicateContext ctx) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
+        ExpressionSegment right = (ExpressionSegment) visit(ctx.bitExpr(1));
+        String operator = null == ctx.NOT() ? "REGEXP" : "NOT REGEXP";
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    private BinaryOperationExpression createBinaryOperationExpressionFromRlike(final PredicateContext ctx) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
+        ExpressionSegment right = (ExpressionSegment) visit(ctx.bitExpr(1));
+        String operator = null == ctx.NOT() ? "RLIKE" : "NOT RLIKE";
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    private BetweenExpression createBetweenSegment(final PredicateContext ctx) {
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0));
+        ExpressionSegment between = (ExpressionSegment) visit(ctx.bitExpr(1));
+        ExpressionSegment and = (ExpressionSegment) visit(ctx.predicate());
+        boolean not = null != ctx.NOT();
+        return new BetweenExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, between, and, not);
+    }
+    
+    @Override
+    public final ASTNode visitBitExpr(final BitExprContext ctx) {
+        if (null != ctx.simpleExpr()) {
+            return visit(ctx.simpleExpr());
+        }
+        ExpressionSegment left = (ExpressionSegment) visit(ctx.getChild(0));
+        ExpressionSegment right = (ExpressionSegment) visit(ctx.getChild(2));
+        String operator = ctx.getChild(1).getText();
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text);
+    }
+    
+    @Override
+    public final ASTNode visitSimpleExpr(final SimpleExprContext ctx) {
+        int startIndex = ctx.start.getStartIndex();
+        int stopIndex = ctx.stop.getStopIndex();
+        if (null != ctx.subquery()) {
+            SubquerySegment subquerySegment = new SubquerySegment(ctx.subquery().getStart().getStartIndex(), ctx.subquery().getStop().getStopIndex(),
+                    (MySQLSelectStatement) visit(ctx.subquery()), getOriginalText(ctx.subquery()));
+            return null == ctx.EXISTS() ? new SubqueryExpressionSegment(subquerySegment) : new ExistsSubqueryExpression(startIndex, stopIndex, subquerySegment);
+        }
+        if (null != ctx.parameterMarker()) {
+            ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker());
+            ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(startIndex, stopIndex, parameterMarker.getValue(), parameterMarker.getType());
+            parameterMarkerSegments.add(result);
+            return result;
+        }
+        if (null != ctx.literals()) {
+            return SQLUtils.createLiteralExpression(visit(ctx.literals()), startIndex, stopIndex, ctx.literals().start.getInputStream().getText(new Interval(startIndex, stopIndex)));
+        }
+        if (null != ctx.intervalExpression()) {
+            return visit(ctx.intervalExpression());
+        }
+        if (null != ctx.functionCall()) {
+            return visit(ctx.functionCall());
+        }
+        if (null != ctx.collateClause()) {
+            if (null != ctx.simpleExpr()) {
+                ExpressionSegment expr = (ExpressionSegment) visit(ctx.simpleExpr(0));
+                return new CollateExpression(startIndex, stopIndex, (SimpleExpressionSegment) visit(ctx.collateClause()), expr);
+            }
+            return new CollateExpression(startIndex, stopIndex, (SimpleExpressionSegment) visit(ctx.collateClause()), null);
+        }
+        if (null != ctx.columnRef()) {
+            return visit(ctx.columnRef());
+        }
+        if (null != ctx.matchExpression()) {
+            return visit(ctx.matchExpression());
+        }
+        if (null != ctx.notOperator()) {
+            ASTNode expression = visit(ctx.simpleExpr(0));
+            if (expression instanceof ExistsSubqueryExpression) {
+                ((ExistsSubqueryExpression) expression).setNot(true);
+                return expression;
+            }
+            return new NotExpression(startIndex, stopIndex, (ExpressionSegment) expression, "!".equalsIgnoreCase(ctx.notOperator().getText()));
+        }
+        if (null != ctx.LP_() && 1 == ctx.expr().size()) {
+            return visit(ctx.expr(0));
+        }
+        if (null != ctx.OR_()) {
+            ExpressionSegment left = (ExpressionSegment) visit(ctx.simpleExpr(0));
+            ExpressionSegment right = (ExpressionSegment) visit(ctx.simpleExpr(1));
+            String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+            return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, ctx.OR_().getText(), text);
+        }
+        return visitRemainSimpleExpr(ctx);
+    }
+    
+    private ASTNode visitRemainSimpleExpr(final HiveStatementParser.SimpleExprContext ctx) {
+        if (null != ctx.caseExpression()) {
+            return visit(ctx.caseExpression());
+        }
+        if (null != ctx.BINARY()) {
+            return visit(ctx.simpleExpr(0));
+        }
+        if (null != ctx.variable()) {
+            return visit(ctx.variable());
+        }
+        for (HiveStatementParser.ExprContext each : ctx.expr()) {
+            visit(each);
+        }
+        for (HiveStatementParser.SimpleExprContext each : ctx.simpleExpr()) {
+            visit(each);
+        }
+        String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+        return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text);
+    }
+    
+    @Override
+    public ASTNode visitColumnRef(final ColumnRefContext ctx) {
+        int identifierCount = ctx.identifier().size();
+        ColumnSegment result;
+        if (1 == identifierCount) {
+            result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier(0)));
+        } else if (2 == identifierCount) {
+            result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier(1)));
+            result.setOwner(new OwnerSegment(ctx.identifier(0).start.getStartIndex(), ctx.identifier(0).stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier(0))));
+        } else {
+            result = new ColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier(2)));
+            OwnerSegment owner = new OwnerSegment(ctx.identifier(1).start.getStartIndex(), ctx.identifier(1).stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier(1)));
+            owner.setOwner(new OwnerSegment(ctx.identifier(0).start.getStartIndex(), ctx.identifier(0).stop.getStopIndex(), (IdentifierValue) visit(ctx.identifier(0))));
+            result.setOwner(owner);
+        }
+        return result;
+    }
+    
+    /**
+     * Get original text.
+     *
+     * @param ctx context
+     * @return original text
+     */
+    protected String getOriginalText(final ParserRuleContext ctx) {
+        return ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+    }
+}
diff --git a/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/HiveStatementVisitorFacade.java b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/HiveStatementVisitorFacade.java
new file mode 100644
index 0000000000000..e07ed97e8c3b9
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/HiveStatementVisitorFacade.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.hive.visitor.statement;
+
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.DALStatementVisitor;
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.DCLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.DDLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.DMLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.RLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.TCLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.hive.visitor.statement.type.HiveDMLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.spi.SQLStatementVisitorFacade;
+
+/**
+ * Statement visitor facade for hive.
+ */
+public final class HiveStatementVisitorFacade implements SQLStatementVisitorFacade {
+    
+    @Override
+    public Class<? extends DMLStatementVisitor> getDMLVisitorClass() {
+        return HiveDMLStatementVisitor.class;
+    }
+    
+    @Override
+    public Class<? extends DDLStatementVisitor> getDDLVisitorClass() {
+        throw new UnsupportedOperationException("");
+    }
+    
+    @Override
+    public Class<? extends TCLStatementVisitor> getTCLVisitorClass() {
+        throw new UnsupportedOperationException("");
+    }
+    
+    @Override
+    public Class<? extends DCLStatementVisitor> getDCLVisitorClass() {
+        throw new UnsupportedOperationException("");
+    }
+    
+    @Override
+    public Class<? extends DALStatementVisitor> getDALVisitorClass() {
+        throw new UnsupportedOperationException("");
+    }
+    
+    @Override
+    public Class<? extends RLStatementVisitor> getRLVisitorClass() {
+        throw new UnsupportedOperationException("");
+    }
+    
+    @Override
+    public String getDatabaseType() {
+        return "Hive";
+    }
+}
diff --git a/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/type/HiveDMLStatementVisitor.java b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/type/HiveDMLStatementVisitor.java
new file mode 100644
index 0000000000000..987a71c42e4a0
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/java/org/apache/shardingsphere/sql/parser/hive/visitor/statement/type/HiveDMLStatementVisitor.java
@@ -0,0 +1,1318 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.hive.visitor.statement.type;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.misc.Interval;
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.apache.shardingsphere.sql.parser.api.ASTNode;
+import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.DMLStatementVisitor;
+import org.apache.shardingsphere.sql.parser.hive.visitor.statement.HiveStatementVisitor;
+import org.apache.shardingsphere.sql.parser.sql.common.enums.AggregationType;
+import org.apache.shardingsphere.sql.parser.sql.common.enums.CombineType;
+import org.apache.shardingsphere.sql.parser.sql.common.enums.JoinType;
+import org.apache.shardingsphere.sql.parser.sql.common.enums.OrderDirection;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dal.VariableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.ColumnAssignmentSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.InsertColumnsSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.OnDuplicateKeyColumnsSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.combine.CombineSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CaseWhenExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExistsSubqueryExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ValuesExpression;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.CommonExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubqueryExpressionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.AggregationDistinctProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.AggregationProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ExpressionProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.SubqueryProjectionSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.GroupBySegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.OrderBySegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.ColumnOrderByItemSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.ExpressionOrderByItemSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.IndexOrderByItemSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.OrderByItemSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.PaginationValueSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.NumberLiteralLimitValueSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.ParameterMarkerLimitValueSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.HavingSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.LockSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeLengthSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.ParameterMarkerSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WindowItemSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WindowSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.DeleteMultiTableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.util.SQLUtils;
+import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.BooleanLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.NumberLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.OtherLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.StringLiteralValue;
+import org.apache.shardingsphere.sql.parser.sql.common.value.parametermarker.ParameterMarkerValue;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml.HiveDeleteStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml.HiveInsertStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml.HiveSelectStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml.HiveUpdateStatement;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * DML statement visitor for hive.
+ */
+public final class HiveDMLStatementVisitor extends HiveStatementVisitor implements DMLStatementVisitor {
+
+ @Override
+ public ASTNode visitSubquery(final HiveStatementParser.SubqueryContext ctx) {
+ return visit(ctx.queryExpressionParens());
+ }
+
    @Override
    public ASTNode visitQueryExpressionParens(final HiveStatementParser.QueryExpressionParensContext ctx) {
        // Recursively unwrap nested parentheses: ((query)) -> (query) -> query.
        if (null != ctx.queryExpressionParens()) {
            return visit(ctx.queryExpressionParens());
        }
        HiveSelectStatement result = (HiveSelectStatement) visit(ctx.queryExpression());
        if (null != ctx.lockClauseList()) {
            result.setLock((LockSegment) visit(ctx.lockClauseList()));
        }
        // Attach all parameter markers ("?") collected while visiting the inner query.
        result.addParameterMarkerSegments(getParameterMarkerSegments());
        return result;
    }
+
    @Override
    public ASTNode visitLockClauseList(final HiveStatementParser.LockClauseListContext ctx) {
        LockSegment result = new LockSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
        // Collect the tables named by each lock clause's table locking list (e.g. FOR UPDATE OF t1, t2).
        for (HiveStatementParser.LockClauseContext each : ctx.lockClause()) {
            if (null != each.tableLockingList()) {
                result.getTables().addAll(generateTablesFromTableAliasRefList(each.tableLockingList().tableAliasRefList()));
            }
        }
        return result;
    }
+
    @Override
    public ASTNode visitQueryExpression(final HiveStatementParser.QueryExpressionContext ctx) {
        HiveSelectStatement result;
        // The query is either a bare expression body or a parenthesized expression.
        if (null != ctx.queryExpressionBody()) {
            result = (HiveSelectStatement) visit(ctx.queryExpressionBody());
        } else {
            result = (HiveSelectStatement) visit(ctx.queryExpressionParens());
        }
        // ORDER BY and LIMIT are attached at this level, outside the expression body.
        if (null != ctx.orderByClause()) {
            result.setOrderBy((OrderBySegment) visit(ctx.orderByClause()));
        }
        if (null != ctx.limitClause()) {
            result.setLimit((LimitSegment) visit(ctx.limitClause()));
        }
        return result;
    }
+
    @Override
    public ASTNode visitSelectWithInto(final HiveStatementParser.SelectWithIntoContext ctx) {
        // Unwrap nested selectWithInto rules until the underlying query expression is reached.
        if (null != ctx.selectWithInto()) {
            return visit(ctx.selectWithInto());
        }
        HiveSelectStatement result = (HiveSelectStatement) visit(ctx.queryExpression());
        if (null != ctx.lockClauseList()) {
            result.setLock((LockSegment) visit(ctx.lockClauseList()));
        }
        return result;
    }
+
    @Override
    public ASTNode visitQueryExpressionBody(final HiveStatementParser.QueryExpressionBodyContext ctx) {
        // Single primary (no set operator): just visit it.
        if (1 == ctx.getChildCount() && ctx.getChild(0) instanceof HiveStatementParser.QueryPrimaryContext) {
            return visit(ctx.queryPrimary());
        }
        // Left operand is a nested expression body; combine it with the right side via UNION/EXCEPT.
        if (null != ctx.queryExpressionBody()) {
            HiveSelectStatement result = new HiveSelectStatement();
            SubquerySegment left = new SubquerySegment(ctx.queryExpressionBody().start.getStartIndex(), ctx.queryExpressionBody().stop.getStopIndex(),
                    (HiveSelectStatement) visit(ctx.queryExpressionBody()), getOriginalText(ctx.queryExpressionBody()));
            // The combined statement reuses the left operand's projections and FROM/table source.
            result.setProjections(left.getSelect().getProjections());
            left.getSelect().getFrom().ifPresent(result::setFrom);
            ((HiveSelectStatement) left.getSelect()).getTable().ifPresent(result::setTable);
            result.setCombine(createCombineSegment(ctx.combineClause(), left));
            return result;
        }
        // Left operand is parenthesized; same combination logic as above.
        if (null != ctx.queryExpressionParens()) {
            HiveSelectStatement result = new HiveSelectStatement();
            SubquerySegment left = new SubquerySegment(ctx.queryExpressionParens().start.getStartIndex(), ctx.queryExpressionParens().stop.getStopIndex(),
                    (HiveSelectStatement) visit(ctx.queryExpressionParens()), getOriginalText(ctx.queryExpressionParens()));
            result.setProjections(left.getSelect().getProjections());
            left.getSelect().getFrom().ifPresent(result::setFrom);
            ((HiveSelectStatement) left.getSelect()).getTable().ifPresent(result::setTable);
            result.setCombine(createCombineSegment(ctx.combineClause(), left));
            return result;
        }
        return visit(ctx.queryExpressionParens());
    }
+
    // Builds the set-operation segment (EXCEPT / UNION / UNION ALL) joining the given
    // left subquery with the right-hand operand found in the combine clause.
    private CombineSegment createCombineSegment(final HiveStatementParser.CombineClauseContext ctx, final SubquerySegment left) {
        CombineType combineType;
        if (null != ctx.EXCEPT()) {
            combineType = CombineType.EXCEPT;
        } else {
            // UNION is the default; "ALL" upgrades it to UNION_ALL.
            combineType = null == ctx.combineOption() || null == ctx.combineOption().ALL() ? CombineType.UNION : CombineType.UNION_ALL;
        }
        ParserRuleContext ruleContext = null == ctx.queryPrimary() ? ctx.queryExpressionParens() : ctx.queryPrimary();
        SubquerySegment right = new SubquerySegment(ruleContext.start.getStartIndex(), ruleContext.stop.getStopIndex(), (HiveSelectStatement) visit(ruleContext), getOriginalText(ruleContext));
        return new CombineSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), left, combineType, right);
    }
+
    @Override
    public ASTNode visitQuerySpecification(final HiveStatementParser.QuerySpecificationContext ctx) {
        HiveSelectStatement result = new HiveSelectStatement();
        result.setProjections((ProjectionsSegment) visit(ctx.projections()));
        // DISTINCT (or similar select specification) marks the projection list as distinct.
        if (null != ctx.selectSpecification()) {
            result.getProjections().setDistinctRow(isDistinct(ctx));
        }
        if (null != ctx.fromClause()) {
            if (null != ctx.fromClause().tableReferences()) {
                TableSegment tableSource = (TableSegment) visit(ctx.fromClause().tableReferences());
                result.setFrom(tableSource);
            }
            // FROM DUAL is modeled as a simple table named after the DUAL token.
            if (null != ctx.fromClause().DUAL()) {
                TableSegment tableSource = new SimpleTableSegment(new TableNameSegment(ctx.fromClause().DUAL().getSymbol().getStartIndex(),
                        ctx.fromClause().DUAL().getSymbol().getStopIndex(), new IdentifierValue(ctx.fromClause().DUAL().getText())));
                result.setFrom(tableSource);
            }
        }
        if (null != ctx.whereClause()) {
            result.setWhere((WhereSegment) visit(ctx.whereClause()));
        }
        if (null != ctx.groupByClause()) {
            result.setGroupBy((GroupBySegment) visit(ctx.groupByClause()));
        }
        if (null != ctx.havingClause()) {
            result.setHaving((HavingSegment) visit(ctx.havingClause()));
        }
        if (null != ctx.windowClause()) {
            result.setWindow((WindowSegment) visit(ctx.windowClause()));
        }
        return result;
    }
+
    @Override
    public ASTNode visitTableValueConstructor(final HiveStatementParser.TableValueConstructorContext ctx) {
        // A VALUES (...) table constructor is represented as a SELECT whose single
        // projection is an expression wrapping the row list.
        HiveSelectStatement result = new HiveSelectStatement();
        int startIndex = ctx.getStart().getStartIndex();
        int stopIndex = ctx.getStop().getStopIndex();
        ValuesExpression valuesExpression = new ValuesExpression(startIndex, stopIndex);
        valuesExpression.getRowConstructorList().addAll(createRowConstructorList(ctx.rowConstructorList()));
        result.setProjections(new ProjectionsSegment(startIndex, stopIndex));
        result.getProjections().getProjections().add(new ExpressionProjectionSegment(startIndex, stopIndex, getOriginalText(ctx), valuesExpression));
        return result;
    }
+
+ private Collection createRowConstructorList(final HiveStatementParser.RowConstructorListContext ctx) {
+ Collection result = new LinkedList<>();
+ for (HiveStatementParser.AssignmentValuesContext each : ctx.assignmentValues()) {
+ result.add((InsertValuesSegment) visit(each));
+ }
+ return result;
+ }
+
    @Override
    public ASTNode visitTableStatement(final HiveStatementParser.TableStatementContext ctx) {
        HiveSelectStatement result = new HiveSelectStatement();
        if (null != ctx.TABLE()) {
            // "TABLE t" form: expose the table as the FROM source, spanning the whole statement.
            result.setFrom(new SimpleTableSegment(new TableNameSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
                    new IdentifierValue(ctx.tableName().getText()))));
        } else {
            // Bare table name form: record it as the statement's target table.
            result.setTable((SimpleTableSegment) visit(ctx.tableName()));
        }
        return result;
    }
+
+ @Override
+ public ASTNode visitWindowClause(final HiveStatementParser.WindowClauseContext ctx) {
+ WindowSegment result = new WindowSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
+ for (HiveStatementParser.WindowItemContext each : ctx.windowItem()) {
+ result.getItemSegments().add((WindowItemSegment) visit(each));
+ }
+ return result;
+ }
+
    @Override
    public ASTNode visitWindowItem(final HiveStatementParser.WindowItemContext ctx) {
        WindowItemSegment result = new WindowItemSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
        result.setWindowName(new IdentifierValue(ctx.identifier().getText()));
        // PARTITION BY expressions, if present.
        if (null != ctx.windowSpecification().PARTITION()) {
            result.setPartitionListSegments(getExpressionsFromExprList(ctx.windowSpecification().expr()));
        }
        if (null != ctx.windowSpecification().orderByClause()) {
            result.setOrderBySegment((OrderBySegment) visit(ctx.windowSpecification().orderByClause()));
        }
        // The frame clause (ROWS/RANGE ...) is kept as raw text in a common expression segment.
        if (null != ctx.windowSpecification().frameClause()) {
            result.setFrameClause(new CommonExpressionSegment(ctx.windowSpecification().frameClause().start.getStartIndex(), ctx.windowSpecification().frameClause().stop.getStopIndex(),
                    ctx.windowSpecification().frameClause().getText()));
        }
        return result;
    }
+
+ private Collection getExpressionsFromExprList(final List exprList) {
+ if (null == exprList) {
+ return Collections.emptyList();
+ }
+ Collection result = new ArrayList<>(exprList.size());
+ for (HiveStatementParser.ExprContext each : exprList) {
+ result.add((ExpressionSegment) visit(each));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitHavingClause(final HiveStatementParser.HavingClauseContext ctx) {
+ ExpressionSegment expr = (ExpressionSegment) visit(ctx.expr());
+ return new HavingSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), expr);
+ }
+
    @Override
    public ASTNode visitIntervalExpression(final HiveStatementParser.IntervalExpressionContext ctx) {
        // INTERVAL <expr> <unit> is modeled as a function named "INTERVAL" with two
        // parameters: the visited value expression and the unit as a literal.
        FunctionSegment result = new FunctionSegment(ctx.INTERVAL().getSymbol().getStartIndex(), ctx.INTERVAL().getSymbol().getStopIndex(), ctx.INTERVAL().getText(), ctx.INTERVAL().getText());
        result.getParameters().add((ExpressionSegment) visit(ctx.intervalValue().expr()));
        result.getParameters().add(new LiteralExpressionSegment(ctx.intervalValue().intervalUnit().getStart().getStartIndex(), ctx.intervalValue().intervalUnit().getStop().getStopIndex(),
                ctx.intervalValue().intervalUnit().getText()));
        return result;
    }
+
    @Override
    public ASTNode visitFunctionCall(final HiveStatementParser.FunctionCallContext ctx) {
        // Dispatch to the matching function category; exactly one alternative is present per grammar.
        if (null != ctx.aggregationFunction()) {
            return visit(ctx.aggregationFunction());
        }
        if (null != ctx.specialFunction()) {
            return visit(ctx.specialFunction());
        }
        if (null != ctx.regularFunction()) {
            return visit(ctx.regularFunction());
        }
        if (null != ctx.jsonFunction()) {
            return visit(ctx.jsonFunction());
        }
        if (null != ctx.udfFunction()) {
            return visit(ctx.udfFunction());
        }
        throw new IllegalStateException("FunctionCallContext must have aggregationFunction, regularFunction, specialFunction, jsonFunction or udfFunction.");
    }
+
    @Override
    public ASTNode visitUdfFunction(final HiveStatementParser.UdfFunctionContext ctx) {
        // A user-defined function keeps its original text as both name and expression,
        // with each argument visited into a parameter segment.
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), getOriginalText(ctx), getOriginalText(ctx));
        if (null != ctx.expr()) {
            for (HiveStatementParser.ExprContext each : ctx.expr()) {
                result.getParameters().add((ExpressionSegment) visit(each));
            }
        }
        return result;
    }
+
    @Override
    public ASTNode visitAggregationFunction(final HiveStatementParser.AggregationFunctionContext ctx) {
        String aggregationType = ctx.aggregationFunctionName().getText();
        // Known aggregation types (COUNT, SUM, ...) get a dedicated aggregation segment;
        // anything else falls back to a plain expression projection over the original text.
        return AggregationType.isAggregationType(aggregationType)
                ? createAggregationSegment(ctx, aggregationType)
                : new ExpressionProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), getOriginalText(ctx));
    }
+
    @Override
    public ASTNode visitJsonFunction(final HiveStatementParser.JsonFunctionContext ctx) {
        HiveStatementParser.JsonFunctionNameContext functionNameContext = ctx.jsonFunctionName();
        String functionName;
        if (null != functionNameContext) {
            functionName = functionNameContext.getText();
            // Visit arguments for side effects (e.g. parameter-marker collection); their
            // results are not attached as parameters here.
            for (HiveStatementParser.ExprContext each : ctx.expr()) {
                visit(each);
            }
        } else if (null != ctx.JSON_SEPARATOR()) {
            // The "->" operator form.
            functionName = ctx.JSON_SEPARATOR().getText();
        } else {
            // The "->>" operator form.
            functionName = ctx.JSON_UNQUOTED_SEPARATOR().getText();
        }
        return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), functionName, getOriginalText(ctx));
    }
+
    // Builds an aggregation (or aggregation-distinct) projection segment for a known
    // aggregation function, capturing the raw inner expression text from "(" to the end.
    private ASTNode createAggregationSegment(final HiveStatementParser.AggregationFunctionContext ctx, final String aggregationType) {
        AggregationType type = AggregationType.valueOf(aggregationType.toUpperCase());
        String innerExpression = ctx.start.getInputStream().getText(new Interval(ctx.LP_().getSymbol().getStartIndex(), ctx.stop.getStopIndex()));
        if (null != ctx.distinct()) {
            AggregationDistinctProjectionSegment result = new AggregationDistinctProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
                    type, innerExpression, getDistinctExpression(ctx));
            result.getParameters().addAll(getExpressions(ctx));
            return result;
        }
        AggregationProjectionSegment result = new AggregationProjectionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), type, innerExpression);
        result.getParameters().addAll(getExpressions(ctx));
        return result;
    }
+
+ private Collection getExpressions(final HiveStatementParser.AggregationFunctionContext ctx) {
+ if (null == ctx.expr()) {
+ return Collections.emptyList();
+ }
+ Collection result = new LinkedList<>();
+ for (HiveStatementParser.ExprContext each : ctx.expr()) {
+ result.add((ExpressionSegment) visit(each));
+ }
+ return result;
+ }
+
+ private String getDistinctExpression(final HiveStatementParser.AggregationFunctionContext ctx) {
+ StringBuilder result = new StringBuilder();
+ for (int i = 3; i < ctx.getChildCount() - 1; i++) {
+ result.append(ctx.getChild(i).getText());
+ }
+ return result.toString();
+ }
+
    @Override
    public ASTNode visitSpecialFunction(final HiveStatementParser.SpecialFunctionContext ctx) {
        // Dispatch to the concrete special-function visitor; at most one alternative is set.
        if (null != ctx.groupConcatFunction()) {
            return visit(ctx.groupConcatFunction());
        }
        if (null != ctx.windowFunction()) {
            return visit(ctx.windowFunction());
        }
        if (null != ctx.castFunction()) {
            return visit(ctx.castFunction());
        }
        if (null != ctx.convertFunction()) {
            return visit(ctx.convertFunction());
        }
        if (null != ctx.positionFunction()) {
            return visit(ctx.positionFunction());
        }
        if (null != ctx.substringFunction()) {
            return visit(ctx.substringFunction());
        }
        if (null != ctx.extractFunction()) {
            return visit(ctx.extractFunction());
        }
        if (null != ctx.charFunction()) {
            return visit(ctx.charFunction());
        }
        if (null != ctx.trimFunction()) {
            return visit(ctx.trimFunction());
        }
        if (null != ctx.weightStringFunction()) {
            return visit(ctx.weightStringFunction());
        }
        if (null != ctx.valuesFunction()) {
            return visit(ctx.valuesFunction());
        }
        if (null != ctx.currentUserFunction()) {
            return visit(ctx.currentUserFunction());
        }
        // Unrecognized special function: keep its raw text as a generic function segment.
        return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), getOriginalText(ctx), getOriginalText(ctx));
    }
+
    @Override
    public ASTNode visitGroupConcatFunction(final HiveStatementParser.GroupConcatFunctionContext ctx) {
        // Register the argument count before building the segment.
        calculateParameterCount(ctx.expr());
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.GROUP_CONCAT().getText(), getOriginalText(ctx));
        for (HiveStatementParser.ExprContext each : ctx.expr()) {
            result.getParameters().add((ExpressionSegment) visit(each));
        }
        return result;
    }
+
    @Override
    public ASTNode visitWindowFunction(final HiveStatementParser.WindowFunctionContext ctx) {
        // Let the base visitor process the window function's children for their side effects,
        // then represent the whole call as a function segment named after the funcName token.
        super.visitWindowFunction(ctx);
        return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.funcName.getText(), getOriginalText(ctx));
    }
+
    @Override
    public ASTNode visitCastFunction(final HiveStatementParser.CastFunctionContext ctx) {
        calculateParameterCount(ctx.expr());
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CAST().getText(), getOriginalText(ctx));
        // Only column and literal arguments are attached as parameters; other expression
        // kinds are visited but deliberately dropped here.
        for (HiveStatementParser.ExprContext each : ctx.expr()) {
            ASTNode expr = visit(each);
            if (expr instanceof ColumnSegment) {
                result.getParameters().add((ColumnSegment) expr);
            } else if (expr instanceof LiteralExpressionSegment) {
                result.getParameters().add((LiteralExpressionSegment) expr);
            }
        }
        // The target type is appended last: either an explicit cast type ...
        if (null != ctx.castType()) {
            result.getParameters().add((DataTypeSegment) visit(ctx.castType()));
        }
        // ... or the DATETIME keyword with optional precision.
        if (null != ctx.DATETIME()) {
            DataTypeSegment dataType = new DataTypeSegment();
            dataType.setDataTypeName(ctx.DATETIME().getText());
            dataType.setStartIndex(ctx.DATETIME().getSymbol().getStartIndex());
            dataType.setStopIndex(ctx.DATETIME().getSymbol().getStopIndex());
            if (null != ctx.typeDatetimePrecision()) {
                dataType.setDataLength((DataTypeLengthSegment) visit(ctx.typeDatetimePrecision()));
            }
            result.getParameters().add(dataType);
        }
        return result;
    }
+
    @Override
    public ASTNode visitCastType(final HiveStatementParser.CastTypeContext ctx) {
        DataTypeSegment result = new DataTypeSegment();
        result.setDataTypeName(ctx.castTypeName.getText());
        result.setStartIndex(ctx.start.getStartIndex());
        result.setStopIndex(ctx.stop.getStopIndex());
        // Field length, e.g. CHAR(10).
        if (null != ctx.fieldLength()) {
            DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.fieldLength());
            result.setDataLength(dataTypeLengthSegment);
        }
        // Precision/scale, e.g. DECIMAL(10, 2); overwrites any field length set above.
        if (null != ctx.precision()) {
            DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.precision());
            result.setDataLength(dataTypeLengthSegment);
        }
        return result;
    }
+
+ @Override
+ public ASTNode visitConvertFunction(final HiveStatementParser.ConvertFunctionContext ctx) {
+ calculateParameterCount(Collections.singleton(ctx.expr()));
+ return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CONVERT().getText(), getOriginalText(ctx));
+ }
+
    @Override
    public ASTNode visitPositionFunction(final HiveStatementParser.PositionFunctionContext ctx) {
        calculateParameterCount(ctx.expr());
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.POSITION().getText(), getOriginalText(ctx));
        // POSITION(needle IN haystack): both operands are attached in source order.
        result.getParameters().add((LiteralExpressionSegment) visit(ctx.expr(0)));
        result.getParameters().add((LiteralExpressionSegment) visit(ctx.expr(1)));
        return result;
    }
+
    @Override
    public ASTNode visitSubstringFunction(final HiveStatementParser.SubstringFunctionContext ctx) {
        // Accept both spellings: SUBSTRING(...) and SUBSTR(...).
        FunctionSegment result = new FunctionSegment(
                ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null == ctx.SUBSTR() ? ctx.SUBSTRING().getText() : ctx.SUBSTR().getText(), getOriginalText(ctx));
        result.getParameters().add((ExpressionSegment) visit(ctx.expr()));
        // Numeric position/length arguments become number literal parameters.
        for (TerminalNode each : ctx.NUMBER_()) {
            result.getParameters().add(new LiteralExpressionSegment(each.getSymbol().getStartIndex(), each.getSymbol().getStopIndex(), new NumberLiteralValue(each.getText()).getValue()));
        }
        return result;
    }
+
    @Override
    public ASTNode visitExtractFunction(final HiveStatementParser.ExtractFunctionContext ctx) {
        calculateParameterCount(Collections.singleton(ctx.expr()));
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.EXTRACT().getText(), getOriginalText(ctx));
        // EXTRACT(unit FROM expr): the unit identifier is kept as a literal parameter.
        result.getParameters().add(new LiteralExpressionSegment(ctx.identifier().getStart().getStartIndex(), ctx.identifier().getStop().getStopIndex(), ctx.identifier().getText()));
        result.getParameters().add((LiteralExpressionSegment) visit(ctx.expr()));
        return result;
    }
+
    @Override
    public ASTNode visitCharFunction(final HiveStatementParser.CharFunctionContext ctx) {
        calculateParameterCount(ctx.expr());
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CHAR().getText(), getOriginalText(ctx));
        // Every argument is visited and attached as a parameter.
        for (HiveStatementParser.ExprContext each : ctx.expr()) {
            ASTNode expr = visit(each);
            result.getParameters().add((ExpressionSegment) expr);
        }
        return result;
    }
+
    @Override
    public ASTNode visitTrimFunction(final HiveStatementParser.TrimFunctionContext ctx) {
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.TRIM().getText(), getOriginalText(ctx));
        // The trim direction keyword (BOTH / TRAILING / LEADING), when present,
        // is recorded first as a literal parameter.
        if (null != ctx.BOTH()) {
            result.getParameters().add(new LiteralExpressionSegment(ctx.BOTH().getSymbol().getStartIndex(), ctx.BOTH().getSymbol().getStopIndex(),
                    new OtherLiteralValue(ctx.BOTH().getSymbol().getText()).getValue()));
        }
        if (null != ctx.TRAILING()) {
            result.getParameters().add(new LiteralExpressionSegment(ctx.TRAILING().getSymbol().getStartIndex(), ctx.TRAILING().getSymbol().getStopIndex(),
                    new OtherLiteralValue(ctx.TRAILING().getSymbol().getText()).getValue()));
        }
        if (null != ctx.LEADING()) {
            result.getParameters().add(new LiteralExpressionSegment(ctx.LEADING().getSymbol().getStartIndex(), ctx.LEADING().getSymbol().getStopIndex(),
                    new OtherLiteralValue(ctx.LEADING().getSymbol().getText()).getValue()));
        }
        // Then the removal string (if any) and the source string, in source order.
        for (HiveStatementParser.ExprContext each : ctx.expr()) {
            result.getParameters().add((ExpressionSegment) visit(each));
        }
        return result;
    }
+
+ @Override
+ public ASTNode visitWeightStringFunction(final HiveStatementParser.WeightStringFunctionContext ctx) {
+ calculateParameterCount(Collections.singleton(ctx.expr()));
+ FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.WEIGHT_STRING().getText(), getOriginalText(ctx));
+ result.getParameters().add((ExpressionSegment) visit(ctx.expr()));
+ return result;
+ }
+
    @Override
    public ASTNode visitValuesFunction(final HiveStatementParser.ValuesFunctionContext ctx) {
        FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.VALUES().getText(), getOriginalText(ctx));
        // Only the first column reference is attached as a parameter; the grammar may
        // allow more, but additional refs are ignored here.
        if (!ctx.columnRefList().columnRef().isEmpty()) {
            ColumnSegment columnSegment = (ColumnSegment) visit(ctx.columnRefList().columnRef(0));
            result.getParameters().add(columnSegment);
        }
        return result;
    }
+
+ @Override
+ public ASTNode visitCurrentUserFunction(final HiveStatementParser.CurrentUserFunctionContext ctx) {
+ return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CURRENT_USER().getText(), getOriginalText(ctx));
+ }
+
+ @Override
+ public ASTNode visitRegularFunction(final HiveStatementParser.RegularFunctionContext ctx) {
+ return null == ctx.completeRegularFunction() ? visit(ctx.shorthandRegularFunction()) : visit(ctx.completeRegularFunction());
+ }
+
+ @Override
+ public ASTNode visitCompleteRegularFunction(final HiveStatementParser.CompleteRegularFunctionContext ctx) {
+ FunctionSegment result = new FunctionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.regularFunctionName().getText(), getOriginalText(ctx));
+ Collection expressionSegments = ctx.expr().stream().map(each -> (ExpressionSegment) visit(each)).collect(Collectors.toList());
+ result.getParameters().addAll(expressionSegments);
+ return result;
+ }
+
    @Override
    public ASTNode visitShorthandRegularFunction(final HiveStatementParser.ShorthandRegularFunctionContext ctx) {
        String text = getOriginalText(ctx);
        FunctionSegment result;
        if (null != ctx.CURRENT_TIME()) {
            result = new FunctionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.CURRENT_TIME().getText(), text);
            // CURRENT_TIME(fsp): the optional fractional-seconds precision becomes a number literal parameter.
            if (null != ctx.NUMBER_()) {
                result.getParameters().add(new LiteralExpressionSegment(ctx.NUMBER_().getSymbol().getStartIndex(), ctx.NUMBER_().getSymbol().getStopIndex(),
                        new NumberLiteralValue(ctx.NUMBER_().getText())));
            }
        } else {
            // Other shorthand functions carry no arguments; the keyword itself is the name.
            result = new FunctionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), text);
        }
        return result;
    }
+
+ @Override
+ public ASTNode visitCaseExpression(final HiveStatementParser.CaseExpressionContext ctx) {
+ Collection whenExprs = new LinkedList<>();
+ Collection thenExprs = new LinkedList<>();
+ for (HiveStatementParser.CaseWhenContext each : ctx.caseWhen()) {
+ whenExprs.add((ExpressionSegment) visit(each.expr(0)));
+ thenExprs.add((ExpressionSegment) visit(each.expr(1)));
+ }
+ ExpressionSegment caseExpr = null == ctx.simpleExpr() ? null : (ExpressionSegment) visit(ctx.simpleExpr());
+ ExpressionSegment elseExpr = null == ctx.caseElse() ? null : (ExpressionSegment) visit(ctx.caseElse().expr());
+ return new CaseWhenExpression(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), caseExpr, whenExprs, thenExprs, elseExpr);
+ }
+
+ @Override
+ public ASTNode visitVariable(final HiveStatementParser.VariableContext ctx) {
+ return null == ctx.systemVariable() ? visit(ctx.userVariable()) : visit(ctx.systemVariable());
+ }
+
+ @Override
+ public ASTNode visitUserVariable(final HiveStatementParser.UserVariableContext ctx) {
+ return new VariableSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.textOrIdentifier().getText());
+ }
+
+ @Override
+ public ASTNode visitSystemVariable(final HiveStatementParser.SystemVariableContext ctx) {
+ VariableSegment result = new VariableSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.rvalueSystemVariable().getText());
+ if (null != ctx.systemVariableScope) {
+ result.setScope(ctx.systemVariableScope.getText());
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitMatchExpression(final HiveStatementParser.MatchExpressionContext ctx) {
+ visit(ctx.expr());
+ String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+ return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), text);
+ }
+
+ // TODO :FIXME, sql case id: insert_with_str_to_date
+ private void calculateParameterCount(final Collection exprContexts) {
+ for (HiveStatementParser.ExprContext each : exprContexts) {
+ visit(each);
+ }
+ }
+
+ @Override
+ public ASTNode visitDataType(final HiveStatementParser.DataTypeContext ctx) {
+ DataTypeSegment result = new DataTypeSegment();
+ result.setDataTypeName(ctx.dataTypeName.getText());
+ result.setStartIndex(ctx.start.getStartIndex());
+ result.setStopIndex(ctx.stop.getStopIndex());
+ if (null != ctx.fieldLength()) {
+ DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.fieldLength());
+ result.setDataLength(dataTypeLengthSegment);
+ }
+ if (null != ctx.precision()) {
+ DataTypeLengthSegment dataTypeLengthSegment = (DataTypeLengthSegment) visit(ctx.precision());
+ result.setDataLength(dataTypeLengthSegment);
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitFieldLength(final HiveStatementParser.FieldLengthContext ctx) {
+ DataTypeLengthSegment result = new DataTypeLengthSegment();
+ result.setStartIndex(ctx.start.getStartIndex());
+ result.setStopIndex(ctx.stop.getStartIndex());
+ result.setPrecision(new BigDecimal(ctx.length.getText()).intValue());
+ return result;
+ }
+
+ @Override
+ public ASTNode visitPrecision(final HiveStatementParser.PrecisionContext ctx) {
+ DataTypeLengthSegment result = new DataTypeLengthSegment();
+ result.setStartIndex(ctx.start.getStartIndex());
+ result.setStopIndex(ctx.stop.getStartIndex());
+ List numbers = ctx.NUMBER_();
+ result.setPrecision(Integer.parseInt(numbers.get(0).getText()));
+ result.setScale(Integer.parseInt(numbers.get(1).getText()));
+ return result;
+ }
+
+ @Override
+ public ASTNode visitTypeDatetimePrecision(final HiveStatementParser.TypeDatetimePrecisionContext ctx) {
+ DataTypeLengthSegment result = new DataTypeLengthSegment();
+ result.setStartIndex(ctx.start.getStartIndex());
+ result.setStopIndex(ctx.stop.getStartIndex());
+ result.setPrecision(Integer.parseInt(ctx.NUMBER_().getText()));
+ return result;
+ }
+
+ @Override
+ public ASTNode visitOrderByClause(final HiveStatementParser.OrderByClauseContext ctx) {
+ Collection items = new LinkedList<>();
+ for (HiveStatementParser.OrderByItemContext each : ctx.orderByItem()) {
+ items.add((OrderByItemSegment) visit(each));
+ }
+ return new OrderBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items);
+ }
+
+ @Override
+ public ASTNode visitOrderByItem(final HiveStatementParser.OrderByItemContext ctx) {
+ OrderDirection orderDirection;
+ if (null != ctx.direction()) {
+ orderDirection = null == ctx.direction().DESC() ? OrderDirection.ASC : OrderDirection.DESC;
+ } else {
+ orderDirection = OrderDirection.ASC;
+ }
+ if (null != ctx.numberLiterals()) {
+ return new IndexOrderByItemSegment(ctx.numberLiterals().getStart().getStartIndex(), ctx.numberLiterals().getStop().getStopIndex(),
+ SQLUtils.getExactlyNumber(ctx.numberLiterals().getText(), 10).intValue(), orderDirection, null);
+ } else {
+ ASTNode expr = visitExpr(ctx.expr());
+ if (expr instanceof ColumnSegment) {
+ return new ColumnOrderByItemSegment((ColumnSegment) expr, orderDirection, null);
+ } else {
+ return new ExpressionOrderByItemSegment(ctx.expr().getStart().getStartIndex(),
+ ctx.expr().getStop().getStopIndex(), getOriginalText(ctx.expr()), orderDirection, null, (ExpressionSegment) expr);
+ }
+ }
+ }
+
+ @Override
+ public ASTNode visitInsert(final HiveStatementParser.InsertContext ctx) {
+ // TODO :FIXME, since there is no segment for insertValuesClause, InsertStatement is created by sub rule.
+ HiveInsertStatement result;
+ if (null != ctx.insertValuesClause()) {
+ result = (HiveInsertStatement) visit(ctx.insertValuesClause());
+ } else if (null != ctx.insertSelectClause()) {
+ result = (HiveInsertStatement) visit(ctx.insertSelectClause());
+ } else {
+ result = new HiveInsertStatement();
+ result.setSetAssignment((SetAssignmentSegment) visit(ctx.setAssignmentsClause()));
+ }
+ if (null != ctx.onDuplicateKeyClause()) {
+ result.setOnDuplicateKeyColumns((OnDuplicateKeyColumnsSegment) visit(ctx.onDuplicateKeyClause()));
+ }
+ result.setTable((SimpleTableSegment) visit(ctx.tableName()));
+ result.addParameterMarkerSegments(getParameterMarkerSegments());
+ return result;
+ }
+
+ @Override
+ public ASTNode visitInsertSelectClause(final HiveStatementParser.InsertSelectClauseContext ctx) {
+ HiveInsertStatement result = new HiveInsertStatement();
+ if (null != ctx.LP_()) {
+ if (null != ctx.fields()) {
+ result.setInsertColumns(new InsertColumnsSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), createInsertColumns(ctx.fields())));
+ } else {
+ result.setInsertColumns(new InsertColumnsSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), Collections.emptyList()));
+ }
+ } else {
+ result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() - 1, ctx.start.getStartIndex() - 1, Collections.emptyList()));
+ }
+ result.setInsertSelect(createInsertSelectSegment(ctx));
+ return result;
+ }
+
+ private SubquerySegment createInsertSelectSegment(final HiveStatementParser.InsertSelectClauseContext ctx) {
+ HiveSelectStatement selectStatement = (HiveSelectStatement) visit(ctx.select());
+ return new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement, getOriginalText(ctx.select()));
+ }
+
+ @Override
+ public ASTNode visitInsertValuesClause(final HiveStatementParser.InsertValuesClauseContext ctx) {
+ HiveInsertStatement result = new HiveInsertStatement();
+ if (null != ctx.LP_()) {
+ if (null != ctx.fields()) {
+ result.setInsertColumns(new InsertColumnsSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), createInsertColumns(ctx.fields())));
+ } else {
+ result.setInsertColumns(new InsertColumnsSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), Collections.emptyList()));
+ }
+ } else {
+ result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() - 1, ctx.start.getStartIndex() - 1, Collections.emptyList()));
+ }
+ result.getValues().addAll(createInsertValuesSegments(ctx.assignmentValues()));
+ return result;
+ }
+
+ private Collection createInsertValuesSegments(final Collection assignmentValuesContexts) {
+ Collection result = new LinkedList<>();
+ for (HiveStatementParser.AssignmentValuesContext each : assignmentValuesContexts) {
+ result.add((InsertValuesSegment) visit(each));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitOnDuplicateKeyClause(final HiveStatementParser.OnDuplicateKeyClauseContext ctx) {
+ Collection columns = new LinkedList<>();
+ for (HiveStatementParser.AssignmentContext each : ctx.assignment()) {
+ columns.add((ColumnAssignmentSegment) visit(each));
+ }
+ return new OnDuplicateKeyColumnsSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columns);
+ }
+
+ private List createInsertColumns(final HiveStatementParser.FieldsContext fields) {
+ List result = new LinkedList<>();
+ for (HiveStatementParser.InsertIdentifierContext each : fields.insertIdentifier()) {
+ result.add((ColumnSegment) visit(each));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitUpdate(final HiveStatementParser.UpdateContext ctx) {
+ HiveUpdateStatement result = new HiveUpdateStatement();
+ TableSegment tableSegment = (TableSegment) visit(ctx.tableReferences());
+ result.setTable(tableSegment);
+ result.setSetAssignment((SetAssignmentSegment) visit(ctx.setAssignmentsClause()));
+ if (null != ctx.whereClause()) {
+ result.setWhere((WhereSegment) visit(ctx.whereClause()));
+ }
+ if (null != ctx.orderByClause()) {
+ result.setOrderBy((OrderBySegment) visit(ctx.orderByClause()));
+ }
+ if (null != ctx.limitClause()) {
+ result.setLimit((LimitSegment) visit(ctx.limitClause()));
+ }
+ result.addParameterMarkerSegments(getParameterMarkerSegments());
+ return result;
+ }
+
+ @Override
+ public ASTNode visitSetAssignmentsClause(final HiveStatementParser.SetAssignmentsClauseContext ctx) {
+ Collection assignments = new LinkedList<>();
+ for (HiveStatementParser.AssignmentContext each : ctx.assignment()) {
+ assignments.add((ColumnAssignmentSegment) visit(each));
+ }
+ return new SetAssignmentSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), assignments);
+ }
+
+ @Override
+ public ASTNode visitAssignmentValues(final HiveStatementParser.AssignmentValuesContext ctx) {
+ List segments = new LinkedList<>();
+ for (HiveStatementParser.AssignmentValueContext each : ctx.assignmentValue()) {
+ segments.add((ExpressionSegment) visit(each));
+ }
+ return new InsertValuesSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), segments);
+ }
+
+ @Override
+ public ASTNode visitAssignment(final HiveStatementParser.AssignmentContext ctx) {
+ ColumnSegment column = (ColumnSegment) visit(ctx.columnRef());
+ ExpressionSegment value = (ExpressionSegment) visit(ctx.assignmentValue());
+ List columnSegments = new LinkedList<>();
+ columnSegments.add(column);
+ return new ColumnAssignmentSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnSegments, value);
+ }
+
+ @Override
+ public ASTNode visitAssignmentValue(final HiveStatementParser.AssignmentValueContext ctx) {
+ HiveStatementParser.ExprContext expr = ctx.expr();
+ if (null != expr) {
+ ASTNode result = visit(expr);
+ if (result instanceof ColumnSegment) {
+ return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText());
+ } else {
+ return result;
+ }
+ }
+ return new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText());
+ }
+
+ @Override
+ public ASTNode visitBlobValue(final HiveStatementParser.BlobValueContext ctx) {
+ return new StringLiteralValue(ctx.string_().getText());
+ }
+
+ @Override
+ public ASTNode visitDelete(final HiveStatementParser.DeleteContext ctx) {
+ HiveDeleteStatement result = new HiveDeleteStatement();
+ if (null != ctx.multipleTablesClause()) {
+ result.setTable((TableSegment) visit(ctx.multipleTablesClause()));
+ } else {
+ result.setTable((TableSegment) visit(ctx.singleTableClause()));
+ }
+ if (null != ctx.whereClause()) {
+ result.setWhere((WhereSegment) visit(ctx.whereClause()));
+ }
+ if (null != ctx.orderByClause()) {
+ result.setOrderBy((OrderBySegment) visit(ctx.orderByClause()));
+ }
+ if (null != ctx.limitClause()) {
+ result.setLimit((LimitSegment) visit(ctx.limitClause()));
+ }
+ result.addParameterMarkerSegments(getParameterMarkerSegments());
+ return result;
+ }
+
+ @Override
+ public ASTNode visitSingleTableClause(final HiveStatementParser.SingleTableClauseContext ctx) {
+ SimpleTableSegment result = (SimpleTableSegment) visit(ctx.tableName());
+ if (null != ctx.alias()) {
+ result.setAlias((AliasSegment) visit(ctx.alias()));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitMultipleTablesClause(final HiveStatementParser.MultipleTablesClauseContext ctx) {
+ DeleteMultiTableSegment result = new DeleteMultiTableSegment();
+ TableSegment relateTableSource = (TableSegment) visit(ctx.tableReferences());
+ result.setRelationTable(relateTableSource);
+ result.setActualDeleteTables(generateTablesFromTableAliasRefList(ctx.tableAliasRefList()));
+ return result;
+ }
+
+ private List generateTablesFromTableAliasRefList(final HiveStatementParser.TableAliasRefListContext ctx) {
+ List result = new LinkedList<>();
+ for (HiveStatementParser.TableIdentOptWildContext each : ctx.tableIdentOptWild()) {
+ result.add((SimpleTableSegment) visit(each.tableName()));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitSelect(final HiveStatementParser.SelectContext ctx) {
+ // TODO :Unsupported for withClause.
+ HiveSelectStatement result;
+ if (null != ctx.queryExpression()) {
+ result = (HiveSelectStatement) visit(ctx.queryExpression());
+ if (null != ctx.lockClauseList()) {
+ result.setLock((LockSegment) visit(ctx.lockClauseList()));
+ }
+ } else if (null != ctx.selectWithInto()) {
+ result = (HiveSelectStatement) visit(ctx.selectWithInto());
+ } else {
+ result = (HiveSelectStatement) visit(ctx.getChild(0));
+ }
+ result.addParameterMarkerSegments(getParameterMarkerSegments());
+ return result;
+ }
+
+ private boolean isDistinct(final HiveStatementParser.QuerySpecificationContext ctx) {
+ for (HiveStatementParser.SelectSpecificationContext each : ctx.selectSpecification()) {
+ if (((BooleanLiteralValue) visit(each)).getValue()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public ASTNode visitSelectSpecification(final HiveStatementParser.SelectSpecificationContext ctx) {
+ if (null != ctx.duplicateSpecification()) {
+ return visit(ctx.duplicateSpecification());
+ }
+ return new BooleanLiteralValue(false);
+ }
+
+ @Override
+ public ASTNode visitDuplicateSpecification(final HiveStatementParser.DuplicateSpecificationContext ctx) {
+ String text = ctx.getText();
+ if ("DISTINCT".equalsIgnoreCase(text) || "DISTINCTROW".equalsIgnoreCase(text)) {
+ return new BooleanLiteralValue(true);
+ }
+ return new BooleanLiteralValue(false);
+ }
+
+ @Override
+ public ASTNode visitProjections(final HiveStatementParser.ProjectionsContext ctx) {
+ Collection projections = new LinkedList<>();
+ if (null != ctx.unqualifiedShorthand()) {
+ projections.add(new ShorthandProjectionSegment(ctx.unqualifiedShorthand().getStart().getStartIndex(), ctx.unqualifiedShorthand().getStop().getStopIndex()));
+ }
+ for (HiveStatementParser.ProjectionContext each : ctx.projection()) {
+ projections.add((ProjectionSegment) visit(each));
+ }
+ ProjectionsSegment result = new ProjectionsSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
+ result.getProjections().addAll(projections);
+ return result;
+ }
+
+ @Override
+ public ASTNode visitProjection(final HiveStatementParser.ProjectionContext ctx) {
+ // FIXME :The stop index of project is the stop index of projection, instead of alias.
+ if (null != ctx.qualifiedShorthand()) {
+ return createShorthandProjection(ctx.qualifiedShorthand());
+ }
+ AliasSegment alias = null == ctx.alias() ? null : (AliasSegment) visit(ctx.alias());
+ ASTNode exprProjection = visit(ctx.expr());
+ if (exprProjection instanceof ColumnSegment) {
+ ColumnProjectionSegment result = new ColumnProjectionSegment((ColumnSegment) exprProjection);
+ result.setAlias(alias);
+ return result;
+ }
+ if (exprProjection instanceof SubquerySegment) {
+ SubquerySegment subquerySegment = (SubquerySegment) exprProjection;
+ String text = ctx.start.getInputStream().getText(new Interval(subquerySegment.getStartIndex(), subquerySegment.getStopIndex()));
+ SubqueryProjectionSegment result = new SubqueryProjectionSegment((SubquerySegment) exprProjection, text);
+ result.setAlias(alias);
+ return result;
+ }
+ if (exprProjection instanceof ExistsSubqueryExpression) {
+ ExistsSubqueryExpression existsSubqueryExpression = (ExistsSubqueryExpression) exprProjection;
+ String text = ctx.start.getInputStream().getText(new Interval(existsSubqueryExpression.getStartIndex(), existsSubqueryExpression.getStopIndex()));
+ SubqueryProjectionSegment result = new SubqueryProjectionSegment(((ExistsSubqueryExpression) exprProjection).getSubquery(), text);
+ result.setAlias(alias);
+ return result;
+ }
+ return createProjection(ctx, alias, exprProjection);
+ }
+
+ private ShorthandProjectionSegment createShorthandProjection(final HiveStatementParser.QualifiedShorthandContext shorthand) {
+ ShorthandProjectionSegment result = new ShorthandProjectionSegment(shorthand.getStart().getStartIndex(), shorthand.getStop().getStopIndex());
+ HiveStatementParser.IdentifierContext identifier = shorthand.identifier().get(shorthand.identifier().size() - 1);
+ OwnerSegment owner = new OwnerSegment(identifier.getStart().getStartIndex(), identifier.getStop().getStopIndex(), new IdentifierValue(identifier.getText()));
+ result.setOwner(owner);
+ if (shorthand.identifier().size() > 1) {
+ HiveStatementParser.IdentifierContext schemaIdentifier = shorthand.identifier().get(0);
+ owner.setOwner(new OwnerSegment(schemaIdentifier.getStart().getStartIndex(), schemaIdentifier.getStop().getStopIndex(), new IdentifierValue(schemaIdentifier.getText())));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitAlias(final HiveStatementParser.AliasContext ctx) {
+ return new AliasSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), new IdentifierValue(ctx.textOrIdentifier().getText()));
+ }
+
+ private ASTNode createProjection(final HiveStatementParser.ProjectionContext ctx, final AliasSegment alias, final ASTNode projection) {
+ if (projection instanceof AggregationProjectionSegment) {
+ ((AggregationProjectionSegment) projection).setAlias(alias);
+ return projection;
+ }
+ if (projection instanceof ExpressionProjectionSegment) {
+ ((ExpressionProjectionSegment) projection).setAlias(alias);
+ return projection;
+ }
+ if (projection instanceof FunctionSegment) {
+ FunctionSegment functionSegment = (FunctionSegment) projection;
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(functionSegment.getStartIndex(), functionSegment.getStopIndex(), functionSegment.getText(), functionSegment);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof CommonExpressionSegment) {
+ CommonExpressionSegment segment = (CommonExpressionSegment) projection;
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getText(), segment);
+ result.setAlias(alias);
+ return result;
+ }
+ // FIXME :For DISTINCT()
+ if (projection instanceof ColumnSegment) {
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx), (ColumnSegment) projection);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof SubqueryExpressionSegment) {
+ SubqueryExpressionSegment subqueryExpressionSegment = (SubqueryExpressionSegment) projection;
+ String text = ctx.start.getInputStream().getText(new Interval(subqueryExpressionSegment.getStartIndex(), subqueryExpressionSegment.getStopIndex()));
+ SubqueryProjectionSegment result = new SubqueryProjectionSegment(subqueryExpressionSegment.getSubquery(), text);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof BinaryOperationExpression) {
+ int startIndex = ((BinaryOperationExpression) projection).getStartIndex();
+ int stopIndex = null == alias ? ((BinaryOperationExpression) projection).getStopIndex() : alias.getStopIndex();
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(startIndex, stopIndex, ((BinaryOperationExpression) projection).getText(), (BinaryOperationExpression) projection);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof ParameterMarkerExpressionSegment) {
+ ParameterMarkerExpressionSegment result = (ParameterMarkerExpressionSegment) projection;
+ result.setAlias(alias);
+ return projection;
+ }
+ if (projection instanceof CaseWhenExpression) {
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx.expr()), (CaseWhenExpression) projection);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof VariableSegment) {
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx.expr()), (VariableSegment) projection);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof BetweenExpression) {
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx.expr()), (BetweenExpression) projection);
+ result.setAlias(alias);
+ return result;
+ }
+ if (projection instanceof InExpression) {
+ ExpressionProjectionSegment result = new ExpressionProjectionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), getOriginalText(ctx.expr()), (InExpression) projection);
+ result.setAlias(alias);
+ return result;
+ }
+ LiteralExpressionSegment column = (LiteralExpressionSegment) projection;
+ ExpressionProjectionSegment result = null == alias
+ ? new ExpressionProjectionSegment(column.getStartIndex(), column.getStopIndex(), String.valueOf(column.getLiterals()), column)
+ : new ExpressionProjectionSegment(column.getStartIndex(), ctx.alias().stop.getStopIndex(), String.valueOf(column.getLiterals()), column);
+ result.setAlias(alias);
+ return result;
+ }
+
+ @Override
+ public ASTNode visitFromClause(final HiveStatementParser.FromClauseContext ctx) {
+ return visit(ctx.tableReferences());
+ }
+
+ @Override
+ public ASTNode visitTableReferences(final HiveStatementParser.TableReferencesContext ctx) {
+ TableSegment result = (TableSegment) visit(ctx.tableReference(0));
+ if (ctx.tableReference().size() > 1) {
+ for (int i = 1; i < ctx.tableReference().size(); i++) {
+ result = generateJoinTableSourceFromEscapedTableReference(ctx.tableReference(i), result);
+ }
+ }
+ return result;
+ }
+
+ private JoinTableSegment generateJoinTableSourceFromEscapedTableReference(final HiveStatementParser.TableReferenceContext ctx, final TableSegment tableSegment) {
+ JoinTableSegment result = new JoinTableSegment();
+ result.setStartIndex(tableSegment.getStartIndex());
+ result.setStopIndex(ctx.stop.getStopIndex());
+ result.setLeft(tableSegment);
+ result.setJoinType(JoinType.COMMA.name());
+ result.setRight((TableSegment) visit(ctx));
+ return result;
+ }
+
+ @Override
+ public ASTNode visitEscapedTableReference(final HiveStatementParser.EscapedTableReferenceContext ctx) {
+ TableSegment result;
+ TableSegment left;
+ left = (TableSegment) visit(ctx.tableFactor());
+ for (HiveStatementParser.JoinedTableContext each : ctx.joinedTable()) {
+ left = visitJoinedTable(each, left);
+ }
+ result = left;
+ return result;
+ }
+
+ @Override
+ public ASTNode visitTableReference(final HiveStatementParser.TableReferenceContext ctx) {
+ TableSegment result;
+ TableSegment left;
+ left = null == ctx.tableFactor() ? (TableSegment) visit(ctx.escapedTableReference()) : (TableSegment) visit(ctx.tableFactor());
+ for (HiveStatementParser.JoinedTableContext each : ctx.joinedTable()) {
+ left = visitJoinedTable(each, left);
+ }
+ result = left;
+ return result;
+ }
+
+ @Override
+ public ASTNode visitTableFactor(final HiveStatementParser.TableFactorContext ctx) {
+ if (null != ctx.subquery()) {
+ HiveSelectStatement subquery = (HiveSelectStatement) visit(ctx.subquery());
+ SubquerySegment subquerySegment = new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), subquery, getOriginalText(ctx.subquery()));
+ SubqueryTableSegment result = new SubqueryTableSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment);
+ if (null != ctx.alias()) {
+ result.setAlias((AliasSegment) visit(ctx.alias()));
+ }
+ return result;
+ }
+ if (null != ctx.tableName()) {
+ SimpleTableSegment result = (SimpleTableSegment) visit(ctx.tableName());
+ if (null != ctx.alias()) {
+ result.setAlias((AliasSegment) visit(ctx.alias()));
+ }
+ return result;
+ }
+ return visit(ctx.tableReferences());
+ }
+
+ private JoinTableSegment visitJoinedTable(final HiveStatementParser.JoinedTableContext ctx, final TableSegment tableSegment) {
+ JoinTableSegment result = new JoinTableSegment();
+ result.setLeft(tableSegment);
+ result.setStartIndex(tableSegment.getStartIndex());
+ result.setStopIndex(ctx.stop.getStopIndex());
+ result.setJoinType(getJoinType(ctx));
+ result.setNatural(null != ctx.naturalJoinType());
+ TableSegment right = null == ctx.tableFactor() ? (TableSegment) visit(ctx.tableReference()) : (TableSegment) visit(ctx.tableFactor());
+ result.setRight(right);
+ return null == ctx.joinSpecification() ? result : visitJoinSpecification(ctx.joinSpecification(), result);
+ }
+
+ private String getJoinType(final HiveStatementParser.JoinedTableContext ctx) {
+ if (null != ctx.innerJoinType()) {
+ return JoinType.INNER.name();
+ }
+ if (null != ctx.outerJoinType()) {
+ return null == ctx.outerJoinType().LEFT() ? JoinType.RIGHT.name() : JoinType.LEFT.name();
+ }
+ if (null != ctx.naturalJoinType()) {
+ return getNaturalJoinType(ctx.naturalJoinType());
+ }
+ return JoinType.COMMA.name();
+ }
+
+ private String getNaturalJoinType(final HiveStatementParser.NaturalJoinTypeContext ctx) {
+ if (null != ctx.LEFT()) {
+ return JoinType.LEFT.name();
+ } else if (null != ctx.RIGHT()) {
+ return JoinType.RIGHT.name();
+ } else {
+ return JoinType.INNER.name();
+ }
+ }
+
+ private JoinTableSegment visitJoinSpecification(final HiveStatementParser.JoinSpecificationContext ctx, final JoinTableSegment result) {
+ if (null != ctx.expr()) {
+ ExpressionSegment condition = (ExpressionSegment) visit(ctx.expr());
+ result.setCondition(condition);
+ }
+ if (null != ctx.USING()) {
+ result.setUsing(ctx.columnNames().columnName().stream().map(each -> (ColumnSegment) visit(each)).collect(Collectors.toList()));
+ }
+ return result;
+ }
+
+ @Override
+ public ASTNode visitWhereClause(final HiveStatementParser.WhereClauseContext ctx) {
+ ASTNode segment = visit(ctx.expr());
+ return new WhereSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (ExpressionSegment) segment);
+ }
+
+ @Override
+ public ASTNode visitGroupByClause(final HiveStatementParser.GroupByClauseContext ctx) {
+ Collection items = new LinkedList<>();
+ for (HiveStatementParser.OrderByItemContext each : ctx.orderByItem()) {
+ items.add((OrderByItemSegment) visit(each));
+ }
+ return new GroupBySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), items);
+ }
+
+ @Override
+ public ASTNode visitLimitClause(final HiveStatementParser.LimitClauseContext ctx) {
+ if (null == ctx.limitOffset()) {
+ return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null, (PaginationValueSegment) visit(ctx.limitRowCount()));
+ }
+ PaginationValueSegment rowCount;
+ PaginationValueSegment offset;
+ if (null != ctx.OFFSET()) {
+ rowCount = (PaginationValueSegment) visit(ctx.limitRowCount());
+ offset = (PaginationValueSegment) visit(ctx.limitOffset());
+ } else {
+ offset = (PaginationValueSegment) visit(ctx.limitOffset());
+ rowCount = (PaginationValueSegment) visit(ctx.limitRowCount());
+ }
+ return new LimitSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), offset, rowCount);
+ }
+
+ @Override
+ public ASTNode visitLimitRowCount(final HiveStatementParser.LimitRowCountContext ctx) {
+ if (null != ctx.numberLiterals()) {
+ return new NumberLiteralLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((NumberLiteralValue) visit(ctx.numberLiterals())).getValue().longValue());
+ }
+ ParameterMarkerSegment result = new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
+ ((ParameterMarkerValue) visit(ctx.parameterMarker())).getValue());
+ getParameterMarkerSegments().add(result);
+ return result;
+ }
+
+ @Override
+ public ASTNode visitConstraintName(final HiveStatementParser.ConstraintNameContext ctx) {
+ return new ConstraintSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (IdentifierValue) visit(ctx.identifier()));
+ }
+
+ @Override
+ public ASTNode visitLimitOffset(final HiveStatementParser.LimitOffsetContext ctx) {
+ if (null != ctx.numberLiterals()) {
+ return new NumberLiteralLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ((NumberLiteralValue) visit(ctx.numberLiterals())).getValue().longValue());
+ }
+ ParameterMarkerSegment result = new ParameterMarkerLimitValueSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
+ ((ParameterMarkerValue) visit(ctx.parameterMarker())).getValue());
+ getParameterMarkerSegments().add(result);
+ return result;
+ }
+
+ @Override
+ public ASTNode visitCollateClause(final HiveStatementParser.CollateClauseContext ctx) {
+ if (null != ctx.collationName()) {
+ return new LiteralExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.collationName().textOrIdentifier().getText());
+ }
+ ParameterMarkerExpressionSegment result = new ParameterMarkerExpressionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(),
+ ((ParameterMarkerValue) visit(ctx.parameterMarker())).getValue());
+ getParameterMarkerSegments().add(result);
+ return result;
+ }
+}
diff --git a/parser/sql/dialect/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.sql.parser.spi.DialectSQLParserFacade b/parser/sql/dialect/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.sql.parser.spi.DialectSQLParserFacade
new file mode 100644
index 0000000000000..cf4d6d14a2602
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.sql.parser.spi.DialectSQLParserFacade
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.shardingsphere.sql.parser.hive.parser.HiveParserFacade
diff --git a/parser/sql/dialect/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.sql.parser.spi.SQLStatementVisitorFacade b/parser/sql/dialect/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.sql.parser.spi.SQLStatementVisitorFacade
new file mode 100644
index 0000000000000..3e8d35ca0bff3
--- /dev/null
+++ b/parser/sql/dialect/hive/src/main/resources/META-INF/services/org.apache.shardingsphere.sql.parser.spi.SQLStatementVisitorFacade
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.shardingsphere.sql.parser.hive.visitor.statement.HiveStatementVisitorFacade
diff --git a/parser/sql/dialect/pom.xml b/parser/sql/dialect/pom.xml
index 2ebc83a049eff..02e0771fce6cb 100644
--- a/parser/sql/dialect/pom.xml
+++ b/parser/sql/dialect/pom.xml
@@ -35,6 +35,7 @@
opengauss
clickhouse
doris
+ hive
diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/HiveStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/HiveStatement.java
new file mode 100644
index 0000000000000..e8a6f1b6073a7
--- /dev/null
+++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/HiveStatement.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive;
+
+import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
+import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement;
+
+/**
+ * Hive statement.
+ */
+public interface HiveStatement extends SQLStatement {
+    
+    /**
+     * Get database type of this statement.
+     *
+     * @return Hive database type, resolved through the typed SPI loader by the key {@code "Hive"}
+     */
+    @Override
+    default DatabaseType getDatabaseType() {
+        return TypedSPILoader.getService(DatabaseType.class, "Hive");
+    }
+}
diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveDeleteStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveDeleteStatement.java
new file mode 100644
index 0000000000000..7c92ebd9fc568
--- /dev/null
+++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveDeleteStatement.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml;
+
+import lombok.Setter;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.OrderBySegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.HiveStatement;
+
+import java.util.Optional;
+
+/**
+ * Hive delete statement.
+ */
+@Setter
+public final class HiveDeleteStatement extends DeleteStatement implements HiveStatement {
+    
+    private OrderBySegment orderBy;
+    
+    private LimitSegment limit;
+    
+    /**
+     * Get order by segment.
+     *
+     * @return order by segment
+     */
+    public Optional<OrderBySegment> getOrderBy() {
+        return Optional.ofNullable(orderBy);
+    }
+    
+    /**
+     * Get limit segment.
+     *
+     * @return limit segment
+     */
+    public Optional<LimitSegment> getLimit() {
+        return Optional.ofNullable(limit);
+    }
+}
diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveInsertStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveInsertStatement.java
new file mode 100644
index 0000000000000..f8b74afa738e5
--- /dev/null
+++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveInsertStatement.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml;
+
+import lombok.Setter;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.OnDuplicateKeyColumnsSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.HiveStatement;
+
+import java.util.Optional;
+
+/**
+ * Hive insert statement.
+ */
+@Setter
+public final class HiveInsertStatement extends InsertStatement implements HiveStatement {
+    
+    private SetAssignmentSegment setAssignment;
+    
+    private OnDuplicateKeyColumnsSegment onDuplicateKeyColumns;
+    
+    /**
+     * Get set assignment segment.
+     *
+     * @return set assignment segment
+     */
+    public Optional<SetAssignmentSegment> getSetAssignment() {
+        return Optional.ofNullable(setAssignment);
+    }
+    
+    /**
+     * Get on duplicate key columns segment.
+     *
+     * @return on duplicate key columns segment
+     */
+    public Optional<OnDuplicateKeyColumnsSegment> getOnDuplicateKeyColumns() {
+        return Optional.ofNullable(onDuplicateKeyColumns);
+    }
+}
diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveSelectStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveSelectStatement.java
new file mode 100644
index 0000000000000..84e16af9ebd03
--- /dev/null
+++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveSelectStatement.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml;
+
+import lombok.Setter;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.LockSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WindowSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.HiveStatement;
+
+import java.util.Optional;
+
+/**
+ * Hive select statement.
+ */
+@Setter
+public final class HiveSelectStatement extends SelectStatement implements HiveStatement {
+    
+    private SimpleTableSegment table;
+    
+    private LimitSegment limit;
+    
+    private LockSegment lock;
+    
+    private WindowSegment window;
+    
+    /**
+     * Get limit segment.
+     *
+     * @return limit segment
+     */
+    public Optional<LimitSegment> getLimit() {
+        return Optional.ofNullable(limit);
+    }
+    
+    /**
+     * Get lock segment.
+     *
+     * @return lock segment
+     */
+    public Optional<LockSegment> getLock() {
+        return Optional.ofNullable(lock);
+    }
+    
+    /**
+     * Get window segment.
+     *
+     * @return window segment
+     */
+    public Optional<WindowSegment> getWindow() {
+        return Optional.ofNullable(window);
+    }
+    
+    /**
+     * Get simple table segment.
+     *
+     * @return simple table segment
+     */
+    public Optional<SimpleTableSegment> getTable() {
+        return Optional.ofNullable(table);
+    }
+}
diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveUpdateStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveUpdateStatement.java
new file mode 100644
index 0000000000000..ccd6fd9f094ec
--- /dev/null
+++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/hive/dml/HiveUpdateStatement.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.dml;
+
+import lombok.Setter;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.OrderBySegment;
+import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment;
+import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement;
+import org.apache.shardingsphere.sql.parser.sql.dialect.statement.hive.HiveStatement;
+
+import java.util.Optional;
+
+/**
+ * Hive update statement.
+ */
+@Setter
+public final class HiveUpdateStatement extends UpdateStatement implements HiveStatement {
+    
+    private OrderBySegment orderBy;
+    
+    private LimitSegment limit;
+    
+    /**
+     * Get order by segment.
+     *
+     * @return order by segment
+     */
+    public Optional<OrderBySegment> getOrderBy() {
+        return Optional.ofNullable(orderBy);
+    }
+    
+    /**
+     * Get limit segment.
+     *
+     * @return limit segment
+     */
+    public Optional<LimitSegment> getLimit() {
+        return Optional.ofNullable(limit);
+    }
+}