Compare commits

...

No commits in common. 'master' and 'presto-jd' have entirely different histories.

@@ -1,19 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="test"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/Java">
<attributes>
<attribute name="owner.project.facets" value="java"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/Apache Tomcat v7.0">
<attributes>
<attribute name="owner.project.facets" value="jst.web"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
<classpathentry kind="lib" path="WebContent/WEB-INF/lib/junit-4.12.jar"/>
<classpathentry kind="output" path="build/classes"/>
</classpath>

.gitignore

@@ -0,0 +1,22 @@
*.iml
*.ipr
*.iws
target/
/var
/*/var/
pom.xml.versionsBackup
test-output/
/atlassian-ide-plugin.xml
.idea
.DS_Store
.classpath
.settings
.project
temp-testng-customsuite.xml
test-output
.externalToolBuilders
*~
benchmark_outputs
*.pyc
*.class
.checkstyle

@@ -1,36 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>data-aggregation-platform</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.common.project.facet.core.builder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.validation.validationbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
</natures>
</projectDescription>

@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="WebContent"/>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.WebProject">
<attributes>
<attribute name="hide" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.baseBrowserLibrary"/>
<classpathentry kind="output" path=""/>
</classpath>

@@ -1,2 +0,0 @@
eclipse.preferences.version=1
encoding//WebContent/WEB-INF/config/config.properties=UTF-8

@@ -1,7 +0,0 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.7

@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
<wb-module deploy-name="data-aggregation-platform">
<wb-resource deploy-path="/" source-path="/WebContent" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/test"/>
<property name="java-output-path" value="/data-aggregation-platform/build/classes"/>
<property name="context-root" value="/"/>
</wb-module>
</project-modules>

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<faceted-project>
<runtime name="Apache Tomcat v7.0"/>
<fixed facet="java"/>
<fixed facet="wst.jsdt.web"/>
<fixed facet="jst.web"/>
<installed facet="java" version="1.7"/>
<installed facet="jst.web" version="3.0"/>
<installed facet="wst.jsdt.web" version="1.0"/>
</faceted-project>

@@ -1 +0,0 @@
org.eclipse.wst.jsdt.launching.baseBrowserLibrary

@@ -0,0 +1,19 @@
language: java
jdk:
- oraclejdk8
env:
global:
- MAVEN_OPTS="-Xmx256M"
sudo: false
cache:
directories:
- $HOME/.m2/io
- $HOME/.m2/org
install: mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V -q -T 2
script: mvn test -Dair.check.skip-dependency=true

@@ -0,0 +1,11 @@
# Contributing to Presto
## Contributor License Agreement ("CLA")
In order to accept your pull request, we need you to submit a CLA. You only need to do this once, so if you've done this for another Facebook open source project, you're good to go. If you are submitting a pull request for the first time, just let us know that you have completed the CLA and we can cross-check with your GitHub username.
Complete your CLA here: <https://code.facebook.com/cla>
## License
By contributing to Presto, you agree that your contributions will be licensed under the [Apache License Version 2.0 (APLv2)](LICENSE).

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -0,0 +1,73 @@
# Presto
Presto is a distributed SQL query engine for big data.
See the [User Manual](https://prestodb.io/docs/current/) for deployment instructions and end user documentation.
## Requirements
* Mac OS X or Linux
* Java 8, 64-bit
* Maven 3.2.3+ (for building)
* Python 2.4+ (for running with the launcher script)
## Building Presto
Presto is a standard Maven project. Simply run the following command from the project root directory:
mvn clean install
On the first build, Maven will download all the dependencies from the internet and cache them in the local repository (`~/.m2/repository`), which can take a considerable amount of time. Subsequent builds will be faster.
Presto has a comprehensive set of unit tests that can take several minutes to run. You can disable the tests when building:
mvn clean install -DskipTests
## Running Presto in your IDE
### Overview
After building Presto for the first time, you can load the project into your IDE and run the server. We recommend using [IntelliJ IDEA](http://www.jetbrains.com/idea/). Because Presto is a standard Maven project, you can import it into your IDE using the root `pom.xml` file. In IntelliJ, choose Open Project from the Quick Start box or choose Open from the File menu and select the root `pom.xml` file.
After opening the project in IntelliJ, double-check that the Java SDK is properly configured for the project:
* Open the File menu and select Project Structure
* In the SDKs section, ensure that a 1.8 JDK is selected (create one if none exist)
* In the Project section, ensure the Project language level is set to 8.0 as Presto makes use of several Java 8 language features
Presto comes with sample configuration that should work out-of-the-box for development. Use the following options to create a run configuration:
* Main Class: `com.facebook.presto.server.PrestoServer`
* VM Options: `-ea -Xmx2G -Dconfig=etc/config.properties -Dlog.levels-file=etc/log.properties`
* Working directory: `$MODULE_DIR$`
* Use classpath of module: `presto-main`
The working directory should be the `presto-main` subdirectory. In IntelliJ, using `$MODULE_DIR$` accomplishes this automatically.
Additionally, the Hive plugin must be configured with the location of your Hive metastore Thrift service. Add the following to the list of VM options, replacing `localhost:9083` with the correct host and port (or use the value below if you do not have a Hive metastore):
-Dhive.metastore.uri=thrift://localhost:9083
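Putting this together with the options listed above, the complete VM Options field would read, for example:
-ea -Xmx2G -Dconfig=etc/config.properties -Dlog.levels-file=etc/log.properties -Dhive.metastore.uri=thrift://localhost:9083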
### Using SOCKS for Hive or HDFS
If your Hive metastore or HDFS cluster is not directly accessible to your local machine, you can use SSH port forwarding to access it. Set up a dynamic SOCKS proxy with SSH listening on local port 1080:
ssh -v -N -D 1080 server
Then add the following to the list of VM options:
-Dhive.metastore.thrift.client.socks-proxy=localhost:1080
### Running the CLI
Start the CLI to connect to the server and run SQL queries:
presto-cli/target/presto-cli-*-executable.jar
Run a query to see the nodes in the cluster:
SELECT * FROM system.runtime.nodes;
In the sample configuration, the Hive connector is mounted in the `hive` catalog, so you can run the following queries to show the tables in the Hive database `default`:
SHOW TABLES FROM hive.default;
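Individual tables in that schema can then be queried with fully qualified `catalog.schema.table` names; the table name below is only a placeholder:
SELECT * FROM hive.default.example_table LIMIT 10;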

@@ -1,3 +0,0 @@
Manifest-Version: 1.0
Class-Path:

@@ -1,39 +0,0 @@
#=============================================================================================================
# MySQL connection settings
#=============================================================================================================
# JDBC driver class
jdbc.mysql.driver=com.mysql.jdbc.Driver
# Connection URL
jdbc.mysql.url=jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true
# Username
jdbc.mysql.username=root
# Password
jdbc.mysql.password=root
#=============================================================================================================
# Oracle connection settings
#=============================================================================================================
# JDBC driver class
jdbc.oracle.driver=oracle.jdbc.driver.OracleDriver
# Connection URL
jdbc.oracle.url=jdbc:oracle:thin:@localhost:1521:orcl
# Username
jdbc.oracle.username=system
# Password
jdbc.oracle.password=oracle
#=============================================================================================================
# Common database / connection pool settings
#=============================================================================================================
jdbc.initialSize=5
jdbc.minIdle=5
jdbc.maxIdle=20
jdbc.maxActive=100
jdbc.maxWait=100000
jdbc.defaultAutoCommit=false
jdbc.removeAbandoned=true
jdbc.removeAbandonedTimeout=600
jdbc.testWhileIdle=true
jdbc.timeBetweenEvictionRunsMillis=60000
jdbc.numTestsPerEvictionRun=20
jdbc.minEvictableIdleTimeMillis=300000
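For orientation: the pool keys above (initialSize, maxActive, maxWait, removeAbandoned, testWhileIdle, and so on) are the property names used by an Apache Commons DBCP BasicDataSource, although the Spring files later in this diff wire a plain DriverManagerDataSource. A minimal, hedged sketch of binding these values to a pooled data source, assuming commons-dbcp is on the classpath (class name is illustrative):
// Sketch only, not part of the project: maps the jdbc.* keys above onto commons-dbcp.
import java.io.FileInputStream;
import java.util.Properties;
import org.apache.commons.dbcp.BasicDataSource;
public class PooledDataSourceSketch {
    public static BasicDataSource fromProperties(String path) throws Exception {
        Properties p = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            p.load(in);                                           // e.g. WEB-INF/config/config.properties
        }
        BasicDataSource ds = new BasicDataSource();
        ds.setDriverClassName(p.getProperty("jdbc.mysql.driver"));
        ds.setUrl(p.getProperty("jdbc.mysql.url"));
        ds.setUsername(p.getProperty("jdbc.mysql.username"));
        ds.setPassword(p.getProperty("jdbc.mysql.password"));
        ds.setInitialSize(Integer.parseInt(p.getProperty("jdbc.initialSize")));
        ds.setMaxActive(Integer.parseInt(p.getProperty("jdbc.maxActive")));
        ds.setMaxWait(Long.parseLong(p.getProperty("jdbc.maxWait")));
        ds.setDefaultAutoCommit(Boolean.parseBoolean(p.getProperty("jdbc.defaultAutoCommit")));
        ds.setRemoveAbandoned(Boolean.parseBoolean(p.getProperty("jdbc.removeAbandoned")));
        ds.setRemoveAbandonedTimeout(Integer.parseInt(p.getProperty("jdbc.removeAbandonedTimeout")));
        ds.setTestWhileIdle(Boolean.parseBoolean(p.getProperty("jdbc.testWhileIdle")));
        return ds;
    }
}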

@@ -1,68 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration PUBLIC "-//APACHE//DTD LOG4J 1.2//EN" "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
<!-- Add appenders below as needed -->
<!-- Console output -->
<appender name="console" class="org.apache.log4j.ConsoleAppender">
<param name="Threshold" value="info"></param>
<layout class="org.apache.log4j.TTCCLayout">
<param name="ConversionPattern" value="TTCCLayout"></param>
</layout>
</appender>
<!-- Log to a file, rolled over daily -->
<appender name="dailyRollingFile" class="org.apache.log4j.DailyRollingFileAppender">
<param name="Threshold" value="info"></param>
<param name="ImmediateFlush" value="true"></param>
<param name="File" value="d:/logs/dailyRollingFile.log"></param>
<param name="DatePattern" value="'.'yyyy-MM-dd'.log'"></param>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="[%d{yyyy-MM-dd HH:mm:ss\} %-5p] [%t] {%c:%L}-%m%n"></param>
</layout>
</appender>
<!-- Log to a file, rolled over to a new file when it reaches the configured size -->
<appender name="railyFile" class="org.apache.log4j.RollingFileAppender">
<param name="File" value="d:/logs/railyFile.log"></param>
<param name="ImmediateFlush" value="true" />
<param name="Threshold" value="info"></param>
<param name="Append" value="true"></param>
<param name="MaxFileSize" value="30KB"></param>
<param name="MaxBackupIndex" value="100"></param>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="[%d{yyyy-MM-dd HH:mm:ss\} %-5p] [%t] {%c:%L}-%m%n"></param>
</layout>
</appender>
<logger name="com.ibatis" additivity="true">
<level value="DEBUG" />
</logger>
<logger name="java.sql.Connection" additivity="true">
<level value="DEBUG" />
</logger>
<logger name="java.sql.Statement" additivity="true">
<level value="DEBUG" />
</logger>
<logger name="java.sql.PreparedStatement" additivity="true">
<level value="DEBUG" />
<appender-ref ref="IBatis" />
</logger>
<logger name="java.sql.ResultSet" additivity="true">
<level value="DEBUG" />
<appender-ref ref="IBatis" />
</logger>
<!-- Global log level. A Threshold configured on an individual appender takes precedence over this global level; e.g. railyFile's Threshold of "info" means only messages at info level and above are written to that appender. -->
<root>
<priority value="debug" />
<appender-ref ref="console" />
<appender-ref ref="dailyRollingFile" />
<appender-ref ref="railyFile" />
<appender-ref ref="file" />
</root>
</log4j:configuration>

@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE configuration PUBLIC
"-//mybatis.org//DTD Config 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-config.dtd">
<configuration>
<typeAliases>
<typeAlias alias="EncodedInfoEntity" type="com.platform.entities.EncodedInfoEntity" />
<typeAlias alias="DataInfoEntity" type="com.platform.entities.DataInfoEntity" />
<typeAlias alias="PagerOptions" type="com.platform.entities.PagerOptions"/>
</typeAliases>
<mappers>
<mapper resource="com/dao/mapper/data-detaisl-mapper.xml" />
</mappers>
</configuration>

@@ -1,74 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
xmlns:tx="http://www.springframework.org/schema/tx" xmlns:aop="http://www.springframework.org/schema/aop"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx-3.0.xsd
http://www.springframework.org/schema/aop
http://www.springframework.org/schema/aop/spring-aop-3.0.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-3.0.xsd">
<!-- Load property values from config.properties -->
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location" value="WEB-INF/config/config.properties"></property>
</bean>
<!-- MySQL database connection -->
<bean id="mySQLDataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="${jdbc.mysql.driver}" />
<property name="url" value="${jdbc.mysql.url}" />
<property name="username" value="${jdbc.mysql.username}" />
<property name="password" value="${jdbc.mysql.password}" />
</bean>
<bean id="mySQLSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
<property name="configLocation" value="WEB-INF/config/mybatis-applicationConfig.xml" />
<property name="dataSource" ref="mySQLDataSource" />
</bean>
<bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
<property name="basePackage" value="com.platform.dao,com.dao.mapper" />
</bean>
<!-- Declarative transaction management: transaction manager -->
<bean id="mySQLTxManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
<property name="dataSource" ref="mySQLDataSource" />
</bean>
<tx:advice id="txAdvice" transaction-manager="mySQLTxManager">
<tx:attributes>
<tx:method name="delete*" propagation="REQUIRED" />
<tx:method name="insert*" propagation="REQUIRED" />
<tx:method name="update*" propagation="REQUIRED" />
<tx:method name="select*" read-only="true" />
</tx:attributes>
</tx:advice>
<aop:config>
<aop:pointcut expression="execution(* com.platform.service.*.*(..))"
id="pointcut" />
<aop:advisor advice-ref="txAdvice" pointcut-ref="pointcut" />
</aop:config>
<!-- Oracle database connection -->
<!-- <bean id="oracleDataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource"
destroy-method="close"> <property name="driverClassName" value="${jdbc.oracle.driver}"
/> <property name="url" value="${jdbc.oracle.url}" /> <property name="username"
value="${jdbc.oracle.username}" /> <property name="password" value="${jdbc.oracle.password}"
/> <property name="initialSize" value="${jdbc.initialSize}" /> <property
name="minIdle" value="${jdbc.minIdle}" /> <property name="maxIdle" value="${jdbc.maxIdle}"
/> <property name="maxActive" value="${jdbc.maxActive}" /> <property name="maxWait"
value="${jdbc.maxWait}" /> <property name="defaultAutoCommit" value="${jdbc.defaultAutoCommit}"
/> <property name="removeAbandoned" value="${jdbc.removeAbandoned}" /> <property
name="removeAbandonedTimeout" value="${jdbc.removeAbandonedTimeout}" /> <property
name="testWhileIdle" value="${jdbc.testWhileIdle}" /> <property name="timeBetweenEvictionRunsMillis"
value="${jdbc.timeBetweenEvictionRunsMillis}" /> <property name="numTestsPerEvictionRun"
value="${jdbc.numTestsPerEvictionRun}" /> <property name="minEvictableIdleTimeMillis"
value="${jdbc.minEvictableIdleTimeMillis}" /> </bean> -->
<context:component-scan base-package="com.platform.*">
<context:exclude-filter type="annotation"
expression="org.springframework.stereotype.Controller" />
</context:component-scan>
</beans>
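The <tx:advice> and <aop:config> above apply transactions to every method under com.platform.service, keyed on method-name prefixes (delete*/insert*/update* are read-write, select* is read-only). No service class is included in this diff, so the following is only an assumed sketch of such a bean (class and method names are hypothetical):
package com.platform.service;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import com.platform.dao.DataInfoDao;          // mapper interface registered by MapperScannerConfigurer
import com.platform.entities.DataInfoEntity;
import com.platform.entities.PagerOptions;
@Service
public class DataInfoServiceSketch {          // hypothetical name
    @Resource
    private DataInfoDao dataInfoDao;
    // "select*" prefix -> read-only transaction per the advice above
    public List<DataInfoEntity> selectDataInfo(PagerOptions options) {
        return dataInfoDao.getLimitedDataInfoEntities(options);   // statement id from the mapper later in this diff
    }
}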

@@ -1,36 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-3.0.xsd">
<context:component-scan base-package="com.platform.controller,com.platform.test;" >
<context:include-filter type="annotation" expression="org.springframework.stereotype.Controller"/>
<context:exclude-filter type="annotation" expression="org.springframework.stereotype.Service"/>
</context:component-scan>
<!-- Converts response objects to JSON -->
<bean id="stringConverter"
class="org.springframework.http.converter.StringHttpMessageConverter">
<property name="supportedMediaTypes">
<list>
<value>application/json;charset=UTF-8</value>
<value>text/html;charset=UTF-8</value>
</list>
</property>
</bean>
<bean id="jsonConverter"
class="org.springframework.http.converter.json.MappingJackson2HttpMessageConverter"/>
<bean class="org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter">
<property name="messageConverters">
<list>
<ref bean="stringConverter" />
<ref bean="jsonConverter" />
</list>
</property>
</bean>
</beans>
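The scan above registers only @Controller beans, and the converters decide how their return values are written to the response; the test page later in this diff posts to a "json" URL, so a matching handler would plausibly look like this sketch (class and method names are assumptions; no controller source appears in the diff):
package com.platform.controller;
import java.util.Collections;
import java.util.Map;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
public class JsonControllerSketch {           // hypothetical name
    @RequestMapping("/json")
    @ResponseBody                             // serialized to JSON by the jsonConverter bean above
    public Map<String, String> json() {
        return Collections.singletonMap("status", "ok");
    }
}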

@@ -1,61 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://java.sun.com/xml/ns/javaee"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
id="WebApp_ID" version="3.0">
<display-name>MyFirstTest</display-name>
<welcome-file-list>
<welcome-file>/index.html</welcome-file>
</welcome-file-list>
<!-- Set the character encoding for POST requests to UTF-8; GET encoding is configured in Tomcat -->
<filter>
<filter-name>CharacterEncodingFilter</filter-name>
<filter-class>org.springframework.web.filter.CharacterEncodingFilter</filter-class>
<init-param>
<param-name>encoding</param-name>
<param-value>UTF-8</param-value>
</init-param>
</filter>
<filter-mapping>
<filter-name>CharacterEncodingFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<!-- Spring configuration file -->
<context-param>
<param-name>contextConfigLocation</param-name>
<param-value>WEB-INF/config/spring-applicationContext.xml</param-value>
</context-param>
<listener>
<listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
</listener>
<!-- Spring MVC configuration file -->
<servlet>
<servlet-name>springmvc</servlet-name>
<servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
<init-param>
<param-name>contextConfigLocation</param-name>
<param-value>/WEB-INF/config/springmvc-applicationContext.xml</param-value>
</init-param>
<load-on-startup>1</load-on-startup>
<async-supported>true</async-supported>
</servlet>
<servlet-mapping>
<servlet-name>springmvc</servlet-name>
<url-pattern>/*</url-pattern>
</servlet-mapping>
<!-- log4j configuration file -->
<context-param>
<param-name>log4jConfigLocation</param-name>
<param-value>/WEB-INF/config/log4j-config.xml</param-value>
</context-param>
<listener>
<listener-class>org.springframework.web.util.Log4jConfigListener</listener-class>
</listener>
<!-- Listener that prevents Spring-related memory leaks (Introspector cleanup) -->
<listener>
<listener-class>org.springframework.web.util.IntrospectorCleanupListener</listener-class>
</listener>
</web-app>

@@ -1,23 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
<script type="text/javascript" src="jquery-1.7.2.min.js"></script>
<script type="text/javascript">
$(function(){
$("#btn").{"name":"伍名"}click(function(){
$.post("json",function(data){
alert(data);
});
});
});
</script>
</head>
<body>
<form action="hello" method="post">
name:<input type="text" name="name">
<input type="submit" value="submit">
</form>
</body>
</html>

@@ -1,93 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//ibatis.apache.org//DTD Mapper 3.0//EN"
"http://ibatis.apache.org/dtd/ibatis-3-mapper.dtd">
<mapper namespace="com.platform.dao.DataInfoDao">
<resultMap id="getEntityByText" type="DataInfoEntity">
<id property="id" column="id" javaType="int" jdbcType="INTEGER" />
<result property="regionalismCode" column="regionalism_code"
javaType="string" jdbcType="VARCHAR" />
<result property="cityName" column="city_name" javaType="string"
jdbcType="VARCHAR" />
<result property="districtName" column="district_name"
javaType="string" jdbcType="VARCHAR" />
<result property="systemCode" column="system_code" javaType="int"
jdbcType="INTEGER" />
<result property="systemName" column="system_name" javaType="string"
jdbcType="VARCHAR" />
<result property="dataType" column="data_type" javaType="string"
jdbcType="VARCHAR" />
<result property="dataVersion" column="data_version" javaType="int"
jdbcType="INTEGER" />
<result property="submittedBatch" column="submitted_batch"
javaType="string" jdbcType="VARCHAR" />
<result property="dataPath" column="data_path" javaType="string"
jdbcType="VARCHAR" />
<result property="charset" column="data_charset" javaType="string"
jdbcType="VARCHAR" />
<result property="collectingTime" column="collection_time"
javaType="string" jdbcType="DATE" />
<result property="collectorContacts" column="collector_contacts"
javaType="string" jdbcType="VARCHAR" />
<result property="collectorName" column="collector_name"
javaType="string" jdbcType="VARCHAR" />
<result property="year" column="data_year" javaType="string"
jdbcType="VARCHAR" />
</resultMap>
<sql id="conditionsFilters">
<if test="PagerOptions!=null">
<where>
<if test="PagerOptions.dataType!=null">
and data_details.data_type=#{PagerOptions.dataType}
</if>
<if test="PagerOptions.submittedBatch!=null">
and
data_details.submitted_batch=#{PagerOptions.submittedBatch}
</if>
<if test="PagerOptions.cityName!=null">
and data_details.city_name=#{PagerOptions.cityName}
</if>
<if test="PagerOptions.districtName!=null">
and
data_details.district_name=#{PagerOptions.districtName}
</if>
<if test="PagerOptions.dataVersion">
and
data_details.data_version=#{PagerOptions.dataVersion}
</if>
<if test="PagerOptions.systemName">
and data_details.system_name=#{PagerOptions.systemName}
</if>
<if test="PagerOptions.dataYear">
and data_details.data_year=#{PagerOptions.dataYear}
</if>
<choose>
<when test="PagerOptions.idIndex>0">
and data_details.id>=#{PagerOptions.idIndex}
</when>
<otherwise>
and data_details.id>=0
</otherwise>
</choose>
</where>
ORDER BY data_details.id
<if test="PagerOptions.limit">LIMIT #{PagerOptions.limit}</if>
</if>
</sql>
<select id="getLimitedDataInfoEntities" parameterType="PagerOptions"
resultMap="getEntityByText">
SELECT
id,regionalism_code,city_name,district_name,system_code,system_name,data_type,data_version,submitted_batch,data_path,data_charset,collection_time,collector_name,collector_contacts,data_year
FROM data_details
<include refid="conditionsFilters" />
</select>
<select id="getLimitedDataCount" resultType="java.lang.Integer"
parameterType="PagerOptions">
SELECT COUNT(id) FROM data_details
<include refid="conditionsFilters" />
</select>
</mapper>
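The namespace and statement ids above imply a mapper interface com.platform.dao.DataInfoDao with two query methods. The interface itself is not part of this diff, so the signatures below are assumptions; in particular the @Param("PagerOptions") wrapper is inferred from how the statements reference PagerOptions.*:
package com.platform.dao;
import java.util.List;
import org.apache.ibatis.annotations.Param;
import com.platform.entities.DataInfoEntity;
import com.platform.entities.PagerOptions;
public interface DataInfoDao {
    // backs <select id="getLimitedDataInfoEntities"> above
    List<DataInfoEntity> getLimitedDataInfoEntities(@Param("PagerOptions") PagerOptions options);
    // backs <select id="getLimitedDataCount"> above
    int getLimitedDataCount(@Param("PagerOptions") PagerOptions options);
}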

@@ -1,64 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
xmlns:tx="http://www.springframework.org/schema/tx" xmlns:aop="http://www.springframework.org/schema/aop"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/tx
http://www.springframework.org/schema/tx/spring-tx-3.0.xsd
http://www.springframework.org/schema/aop
http://www.springframework.org/schema/aop/spring-aop-3.0.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-3.0.xsd">
<!-- Load property values from config.properties -->
<bean
class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location" value="/WebContent/WEB-INF/config/config.properties"></property>
</bean>
<!-- MySQL database connection -->
<bean id="mySQLDataSource"
class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<property name="driverClassName" value="${jdbc.mysql.driver}" />
<property name="url" value="${jdbc.mysql.url}" />
<property name="username" value="${jdbc.mysql.username}" />
<property name="password" value="${jdbc.mysql.password}" />
</bean>
<bean id="mySQLSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
<property name="configLocation" value="/WebContent/WEB-INF/config/mybatis-applicationConfig.xml" />
<property name="dataSource" ref="mySQLDataSource" />
</bean>
<bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
<property name="basePackage" value="com.platform.dao" />
</bean>
<!-- Declarative transaction management -->
<bean id="mySQLTxManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
<property name="dataSource" ref="mySQLDataSource" />
</bean>
<tx:advice id="txAdvice" transaction-manager="mySQLTxManager">
<tx:attributes>
<tx:method name="delete*" propagation="REQUIRED" />
<tx:method name="insert*" propagation="REQUIRED" />
<tx:method name="update*" propagation="REQUIRED" />
<tx:method name="get*" read-only="true" />
<tx:method name="select*" read-only="true" />
</tx:attributes>
</tx:advice>
<aop:config>
<aop:pointcut expression="execution(* com.platform.service.*.*(..))"
id="pointcut" />
<aop:advisor advice-ref="txAdvice" pointcut-ref="pointcut" />
</aop:config>
<bean id="user" class="com.platform.test.User">
<property name="name" value="伍名" />
<property name="id" value="1" />
<property name="sex" value="男" />
</bean>
<context:component-scan base-package="com.platform.*">
<context:exclude-filter type="annotation"
expression="org.springframework.stereotype.Controller" />
</context:component-scan>
</beans>

@@ -0,0 +1,916 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>io.airlift</groupId>
<artifactId>airbase</artifactId>
<version>39</version>
</parent>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-root</artifactId>
<version>0.107</version>
<packaging>pom</packaging>
<name>presto-root</name>
<description>Presto</description>
<url>https://github.com/facebook/presto</url>
<inceptionYear>2012</inceptionYear>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:git://github.com/facebook/presto.git</connection>
<url>https://github.com/facebook/presto</url>
<tag>0.107</tag>
</scm>
<properties>
<air.main.basedir>${project.basedir}</air.main.basedir>
<air.check.skip-extended>true</air.check.skip-extended>
<air.check.skip-license>false</air.check.skip-license>
<air.check.fail-checkstyle>true</air.check.fail-checkstyle>
<air.check.skip-checkstyle>false</air.check.skip-checkstyle>
<dep.antlr.version>4.3</dep.antlr.version>
<dep.airlift.version>0.110</dep.airlift.version>
<dep.packaging.version>${dep.airlift.version}</dep.packaging.version>
<dep.slice.version>0.14</dep.slice.version>
<cli.skip-execute>true</cli.skip-execute>
<cli.main-class>None</cli.main-class>
<!-- use a fractional hour timezone offset for tests -->
<air.test.timezone>Asia/Katmandu</air.test.timezone>
<air.test.parallel>methods</air.test.parallel>
<air.test.thread-count>4</air.test.thread-count>
<air.test.jvmsize>1792m</air.test.jvmsize>
<air.javadoc.lint>-missing</air.javadoc.lint>
</properties>
<modules>
<module>presto-spi</module>
<module>presto-kafka</module>
<module>presto-cassandra</module>
<module>presto-orc</module>
<module>presto-hive</module>
<module>presto-hive-hadoop1</module>
<module>presto-hive-hadoop2</module>
<module>presto-hive-cdh4</module>
<module>presto-hive-cdh5</module>
<module>presto-example-http</module>
<module>presto-tpch</module>
<module>presto-raptor</module>
<module>presto-base-jdbc</module>
<module>presto-mysql</module>
<module>presto-sqlserver</module>
<module>presto-postgresql</module>
<module>presto-client</module>
<module>presto-parser</module>
<module>presto-main</module>
<module>presto-ml</module>
<module>presto-benchmark</module>
<module>presto-tests</module>
<module>presto-jdbc</module>
<module>presto-cli</module>
<module>presto-benchmark-driver</module>
<module>presto-server</module>
<module>presto-docs</module>
<module>presto-verifier</module>
<module>presto-oracle</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-spi</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-orc</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-hive</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-hive-cdh4</artifactId>
<version>${project.version}</version>
<type>zip</type>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-example-http</artifactId>
<version>${project.version}</version>
<type>zip</type>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-hive</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-tpch</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-base-jdbc</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-mysql</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-sqlserver</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-raptor</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-cli</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-client</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-parser</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-parser</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-main</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-main</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-tests</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-benchmark</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto.hadoop</groupId>
<artifactId>hadoop-apache1</artifactId>
<version>0.2</version>
</dependency>
<dependency>
<groupId>com.facebook.presto.hadoop</groupId>
<artifactId>hadoop-apache2</artifactId>
<version>0.1</version>
</dependency>
<dependency>
<groupId>com.facebook.presto.hadoop</groupId>
<artifactId>hadoop-cdh4</artifactId>
<version>0.8</version>
</dependency>
<dependency>
<groupId>com.facebook.presto.hive</groupId>
<artifactId>hive-apache</artifactId>
<version>0.14</version>
</dependency>
<dependency>
<groupId>com.facebook.hive</groupId>
<artifactId>hive-dwrf</artifactId>
<version>0.8</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>org.iq80.snappy</groupId>
<artifactId>snappy</artifactId>
</exclusion>
<exclusion>
<groupId>com.facebook.presto.hadoop</groupId>
<artifactId>hadoop-cdh4</artifactId>
</exclusion>
<exclusion>
<groupId>it.unimi.dsi</groupId>
<artifactId>fastutil</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.facebook.hive</groupId>
<artifactId>hive-dwrf-shims</artifactId>
<version>0.8</version>
<exclusions>
<exclusion>
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>2.4.1</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>log</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>log-manager</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>json</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>units</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>concurrent</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>configuration</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>discovery</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>testing</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>node</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>bootstrap</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>event</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>http-server</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>jaxrs</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>jmx</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>trace-token</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>dbpool</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>jmx-http</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>http-client</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>stats</artifactId>
<version>${dep.airlift.version}</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>joni</artifactId>
<version>2.1.5.1</version>
</dependency>
<dependency>
<groupId>io.airlift.tpch</groupId>
<artifactId>tpch</artifactId>
<version>0.4</version>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-all</artifactId>
<version>4.1</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.3.170</version>
</dependency>
<dependency>
<groupId>org.sonatype.aether</groupId>
<artifactId>aether-api</artifactId>
<version>1.13.1</version>
</dependency>
<dependency>
<groupId>io.airlift.resolver</groupId>
<artifactId>resolver</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>airline</artifactId>
<version>0.6</version>
</dependency>
<dependency>
<groupId>org.iq80.snappy</groupId>
<artifactId>snappy</artifactId>
<version>0.3</version>
</dependency>
<dependency>
<groupId>org.openjdk.jol</groupId>
<artifactId>jol-core</artifactId>
<version>0.2</version>
</dependency>
<dependency>
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
<version>13.0</version>
</dependency>
<dependency>
<groupId>it.unimi.dsi</groupId>
<artifactId>fastutil</artifactId>
<version>6.5.9</version>
</dependency>
<dependency>
<groupId>com.facebook.thirdparty</groupId>
<artifactId>libsvm</artifactId>
<version>3.18.1</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.35</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.3-1102-jdbc41</version>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
<version>${dep.antlr.version}</version>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-annotations</artifactId>
<version>${dep.antlr.version}</version>
</dependency>
<dependency>
<groupId>jline</groupId>
<artifactId>jline</artifactId>
<version>2.12</version>
</dependency>
<dependency>
<groupId>org.jdbi</groupId>
<artifactId>jdbi</artifactId>
<version>2.55</version>
</dependency>
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
<version>0.9.1</version>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>net.sf.opencsv</groupId>
<artifactId>opencsv</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math3</artifactId>
<version>3.2</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<version>3.7.0.Final</version>
</dependency>
<dependency>
<groupId>io.airlift.discovery</groupId>
<artifactId>discovery-server</artifactId>
<version>1.24</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk</artifactId>
<version>1.8.9.1</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>testing-mysql-server</artifactId>
<version>0.1</version>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>testing-postgresql-server</artifactId>
<version>0.3</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.8.1.1</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-jdk14</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.1.3</version>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>3.3.6</version>
<exclusions>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.101tec</groupId>
<artifactId>zkclient</artifactId>
<version>0.4</version>
<exclusions>
<exclusion>
<artifactId>log4j</artifactId>
<groupId>log4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.jgrapht</groupId>
<artifactId>jgrapht-core</artifactId>
<version>0.9.0</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<version>${dep.antlr.version}</version>
<executions>
<execution>
<goals>
<goal>antlr4</goal>
</goals>
</execution>
</executions>
<configuration>
<visitor>true</visitor>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.3</version>
</plugin>
<plugin>
<groupId>org.skife.maven</groupId>
<artifactId>really-executable-jar-maven-plugin</artifactId>
<version>1.0.5</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.2.1</version>
</plugin>
<plugin>
<groupId>org.tomdz.maven</groupId>
<artifactId>sphinx-maven-plugin</artifactId>
<version>1.0.3</version>
</plugin>
<!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
<plugin>
<!--suppress MavenModelInspection -->
<groupId>org.eclipse.m2e</groupId>
<!--suppress MavenModelInspection -->
<artifactId>lifecycle-mapping</artifactId>
<!--suppress MavenModelInspection -->
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<versionRange>[2.5.1,)</versionRange>
<goals>
<goal>copy</goal>
<goal>analyze-dep-mgt</goal>
<goal>analyze-duplicate</goal>
<goal>analyze-only</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<versionRange>[0.6.2.201302030002,)</versionRange>
<goals>
<goal>prepare-agent</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<versionRange>[2.3,)</versionRange>
<goals>
<goal>check</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>com.ning.maven.plugins</groupId>
<artifactId>maven-duplicate-finder-plugin</artifactId>
<versionRange>[1.0.4,)</versionRange>
<goals>
<goal>check</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<versionRange>[0,)</versionRange>
<goals>
<goal>check</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>io.takari.maven.plugins</groupId>
<artifactId>presto-maven-plugin</artifactId>
<versionRange>[0,)</versionRange>
<goals>
<goal>generate-service-descriptor</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>io.takari.maven.plugins</groupId>
<artifactId>takari-lifecycle-plugin</artifactId>
<versionRange>[0,)</versionRange>
<goals>
<goal>compile</goal>
<goal>process-resources</goal>
<goal>process-test-resources</goal>
<goal>testCompile</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.gaul</groupId>
<artifactId>modernizer-maven-plugin</artifactId>
<versionRange>[0,)</versionRange>
<goals>
<goal>modernizer</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore/>
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.gaul</groupId>
<artifactId>modernizer-maven-plugin</artifactId>
<version>1.2.2</version>
<configuration>
<javaVersion>1.8</javaVersion>
<failOnViolations>false</failOnViolations>
</configuration>
<executions>
<execution>
<id>modernizer</id>
<goals>
<goal>modernizer</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>2.15</version>
<executions>
<execution>
<phase>validate</phase>
<goals>
<goal>check</goal>
</goals>
<configuration>
<skip>${air.check.skip-checkstyle}</skip>
<failOnViolation>${air.check.fail-checkstyle}</failOnViolation>
<consoleOutput>true</consoleOutput>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<configLocation>${air.main.basedir}/src/checkstyle/checks.xml</configLocation>
<excludes>**/com/facebook/presto/operator/PagesIndexOrdering.java</excludes>
</configuration>
</execution>
</executions>
<dependencies>
<!-- The version of checkstyle the plugin depends on doesn't support Java 8,
so override it manually until a new version of the plugin is released.
This is copied verbatim from the dependency declaration in the checkstyle
plugin -->
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>6.6</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>io.takari.maven.plugins</groupId>
<artifactId>presto-maven-plugin</artifactId>
<version>0.1.5</version>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>io.takari.maven.plugins</groupId>
<artifactId>provisio-maven-plugin</artifactId>
<version>0.1.11</version>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration combine.children="append">
<fork>false</fork>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<!-- run cli for development: mvn -am -pl presto-cli -P cli compile exec:java -->
<profile>
<id>cli</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<configuration>
<skip>${cli.skip-execute}</skip>
<executable>${java.home}/bin/java</executable>
<mainClass>${cli.main-class}</mainClass>
<arguments>
<argument>--debug</argument>
</arguments>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
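One practical note on the plugin configuration above: the maven-checkstyle-plugin execution binds its <skip> and <failOnViolation> flags to the air.check.* properties, so they can be toggled from the command line without editing the pom. The property names come straight from the configuration above; the invocations below are ordinary Maven usage, shown only as an illustration:

# skip the checkstyle check entirely for a local build
mvn clean install -Dair.check.skip-checkstyle=true

# run checkstyle but report violations without failing the build
mvn clean install -Dair.check.fail-checkstyle=false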

@ -0,0 +1,201 @@
<?xml version="1.0"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-root</artifactId>
<version>0.107</version>
</parent>
<artifactId>presto-base-jdbc</artifactId>
<name>presto-base-jdbc</name>
<description>Presto - Base JDBC Connector</description>
<properties>
<air.main.basedir>${project.parent.basedir}</air.main.basedir>
</properties>
<dependencies>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>bootstrap</artifactId>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>log</artifactId>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>configuration</artifactId>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>concurrent</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<!-- Presto SPI -->
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-spi</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>slice</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>commons-dbutils</groupId>
<artifactId>commons-dbutils</artifactId>
<version>1.6</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-main</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>units</artifactId>
<scope>provided</scope>
</dependency>
<!-- for testing -->
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>testing</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.airlift</groupId>
<artifactId>json</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-tpch</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.airlift.tpch</groupId>
<artifactId>tpch</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-tests</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<!-- integration tests take a very long time, so they are only run on the CI server -->
<excludes>
<exclude>**/TestJdbcDistributedQueries.java</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>ci</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes combine.self="override" />
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
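As the surefire comment above notes, TestJdbcDistributedQueries is excluded from the default test run, and the ci profile overrides the exclusion list with an empty one so the integration tests execute. A hedged example of running the module with that profile active (assuming the module directory matches the presto-base-jdbc artifactId):

mvn -pl presto-base-jdbc -P ci test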

@ -0,0 +1,540 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.plugin.jdbc.cache.JdbcCacheConfig;
import com.facebook.presto.plugin.jdbc.cache.JdbcCacheSplit;
import com.facebook.presto.plugin.jdbc.cache.JdbcJavaBean;
import com.facebook.presto.plugin.jdbc.cache.JdbcResultCache;
import com.facebook.presto.plugin.jdbc.subtable.JdbcSubTableConfig;
import com.facebook.presto.plugin.jdbc.subtable.JdbcSubTableManager;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorPartition;
import com.facebook.presto.spi.ConnectorPartitionResult;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.FixedSplitSource;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import javax.annotation.Nullable;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import static com.facebook.presto.spi.StandardErrorCode.NOT_FOUND;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TimeType.TIME;
import static com.facebook.presto.spi.type.TimeWithTimeZoneType.TIME_WITH_TIME_ZONE;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Maps.fromProperties;
import static java.util.Collections.nCopies;
import static java.util.Locale.ENGLISH;
public class BaseJdbcClient
implements JdbcClient
{
private static final Logger log = Logger.get(BaseJdbcClient.class);
public static final int TYPE_MYSQL = 1;
public static final int TYPE_ORACLE = 2;
public static final int TYPE_SQLSERVER = 3;
private static final Map<Type, String> SQL_TYPES = ImmutableMap.<Type, String>builder()
.put(BOOLEAN, "boolean")
.put(BIGINT, "bigint")
.put(DOUBLE, "double precision")
.put(VARCHAR, "varchar")
.put(VARBINARY, "varbinary")
.put(DATE, "date")
.put(TIME, "time")
.put(TIME_WITH_TIME_ZONE, "time with timezone")
.put(TIMESTAMP, "timestamp")
.put(TIMESTAMP_WITH_TIME_ZONE, "timestamp with timezone")
.build();
protected final String connectorId;
protected final Driver driver;
protected final String connectionUrl;
protected final Properties connectionProperties;
protected final String identifierQuote;
protected int dbType;
protected final boolean jdbcSubTableEnable;
private JdbcSubTableManager subTableManager;
protected final boolean cacheEnable;
private JdbcResultCache jdbcResultCache;
public BaseJdbcClient(JdbcConnectorId connectorId,
BaseJdbcConfig config,
String identifierQuote,
Driver driver,
JdbcSubTableConfig subTableConfig,
JdbcCacheConfig cacheConfig)
{
this.connectorId = checkNotNull(connectorId, "connectorId is null").toString();
this.identifierQuote = checkNotNull(identifierQuote, "identifierQuote is null");
this.driver = checkNotNull(driver, "driver is null");
checkNotNull(config, "config is null");
connectionUrl = config.getConnectionUrl();
connectionProperties = new Properties();
if (config.getConnectionUser() != null) {
connectionProperties.setProperty("user", config.getConnectionUser());
}
if (config.getConnectionPassword() != null) {
connectionProperties.setProperty("password", config.getConnectionPassword());
}
// sub table
jdbcSubTableEnable = subTableConfig.getJdbcSubTableEnable();
if (jdbcSubTableEnable) {
this.subTableManager = new JdbcSubTableManager(this.connectorId, identifierQuote,
driver, connectionUrl, connectionProperties, subTableConfig);
}
// jdbc cache
cacheEnable = cacheConfig.getJdbcCacheEnable();
if (cacheEnable) {
this.jdbcResultCache = new JdbcResultCache(identifierQuote, driver, connectionProperties, cacheConfig);
}
}
@Override
public Set<String> getSchemaNames()
{
try (Connection connection = driver.connect(connectionUrl, connectionProperties);
ResultSet resultSet = connection.getMetaData().getSchemas()) {
ImmutableSet.Builder<String> schemaNames = ImmutableSet.builder();
while (resultSet.next()) {
String schemaName = resultSet.getString("TABLE_SCHEM").toLowerCase(ENGLISH);
// skip internal schemas
if (!schemaName.equals("information_schema")) {
schemaNames.add(schemaName);
}
}
return schemaNames.build();
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public List<SchemaTableName> getTableNames(@Nullable String schema)
{
try (Connection connection = driver.connect(connectionUrl, connectionProperties)) {
DatabaseMetaData metadata = connection.getMetaData();
if (metadata.storesUpperCaseIdentifiers() && (schema != null)) {
schema = schema.toUpperCase(ENGLISH);
}
try (ResultSet resultSet = getTables(connection, schema, null)) {
ImmutableList.Builder<SchemaTableName> list = ImmutableList.builder();
while (resultSet.next()) {
list.add(getSchemaTableName(resultSet));
}
return list.build();
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Nullable
@Override
public JdbcTableHandle getTableHandle(SchemaTableName schemaTableName)
{
try (Connection connection = driver.connect(connectionUrl, connectionProperties)) {
DatabaseMetaData metadata = connection.getMetaData();
String jdbcSchemaName = schemaTableName.getSchemaName();
String jdbcTableName = schemaTableName.getTableName();
if (metadata.storesUpperCaseIdentifiers()) {
jdbcSchemaName = jdbcSchemaName.toUpperCase(ENGLISH);
jdbcTableName = jdbcTableName.toUpperCase(ENGLISH);
}
try (ResultSet resultSet = getTables(connection, jdbcSchemaName, jdbcTableName)) {
List<JdbcTableHandle> tableHandles = new ArrayList<>();
while (resultSet.next()) {
tableHandles.add(new JdbcTableHandle(
connectorId,
schemaTableName,
resultSet.getString("TABLE_CAT"),
resultSet.getString("TABLE_SCHEM"),
resultSet.getString("TABLE_NAME")));
}
if (tableHandles.isEmpty()) {
return null;
}
if (tableHandles.size() > 1) {
throw new PrestoException(NOT_SUPPORTED, "Multiple tables matched: " + schemaTableName);
}
return getOnlyElement(tableHandles);
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public List<JdbcColumnHandle> getColumns(JdbcTableHandle tableHandle)
{
try (Connection connection = driver.connect(connectionUrl, connectionProperties)) {
DatabaseMetaData metadata = connection.getMetaData();
try (ResultSet resultSet = metadata.getColumns(tableHandle.getCatalogName(), tableHandle.getSchemaName(), tableHandle.getTableName(), null)) {
List<JdbcColumnHandle> columns = new ArrayList<>();
boolean found = false;
while (resultSet.next()) {
found = true;
Type columnType = toPrestoType(resultSet.getInt("DATA_TYPE"));
// skip unsupported column types
if (columnType != null) {
String columnName = resultSet.getString("COLUMN_NAME");
columns.add(new JdbcColumnHandle(connectorId, columnName, columnType));
}
}
if (!found) {
throw new TableNotFoundException(tableHandle.getSchemaTableName());
}
if (columns.isEmpty()) {
throw new PrestoException(NOT_SUPPORTED, "Table has no supported column types: " + tableHandle.getSchemaTableName());
}
return ImmutableList.copyOf(columns);
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public ConnectorPartitionResult getPartitions(JdbcTableHandle jdbcTableHandle, TupleDomain<ColumnHandle> tupleDomain)
{
// currently we don't support partitions
return new ConnectorPartitionResult(
ImmutableList.<ConnectorPartition>of(new JdbcPartition(jdbcTableHandle, tupleDomain)),
tupleDomain);
}
@Override
public ConnectorSplitSource getPartitionSplits(JdbcPartition jdbcPartition)
{
if (jdbcSubTableEnable) {
return subTableManager.getTableSplits(jdbcPartition);
}
JdbcTableHandle jdbcTableHandle = jdbcPartition.getJdbcTableHandle();
List<HostAddress> of = ImmutableList.of();
JdbcSplit jdbcSplit = new JdbcSplit(
connectorId,
jdbcTableHandle.getCatalogName(),
jdbcTableHandle.getSchemaName(),
jdbcTableHandle.getTableName(),
connectionUrl,
fromProperties(connectionProperties),
jdbcPartition.getTupleDomain(),
"", of, true, "", "", "", "", System.nanoTime(), 1, false, "");
return new FixedSplitSource(connectorId, ImmutableList.of(jdbcSplit));
}
@Override
public Connection getConnection(JdbcSplit split)
throws SQLException
{
Connection connection = driver.connect(split.getConnectionUrl(), toProperties(split.getConnectionProperties()));
try {
connection.setReadOnly(true);
}
catch (SQLException e) {
connection.close();
throw e;
}
return connection;
}
@Override
public String buildSql(JdbcSplit split, List<JdbcColumnHandle> columnHandles)
{
return new QueryBuilder(identifierQuote).buildSql(
dbType,
split.getCatalogName(),
split.getSchemaName(),
split.getTableName(),
columnHandles,
split.getTupleDomain());
}
@Override
public JdbcOutputTableHandle beginCreateTable(ConnectorTableMetadata tableMetadata)
{
SchemaTableName schemaTableName = tableMetadata.getTable();
String schema = schemaTableName.getSchemaName();
String table = schemaTableName.getTableName();
if (!getSchemaNames().contains(schema)) {
throw new PrestoException(NOT_FOUND, "Schema not found: " + schema);
}
try (Connection connection = driver.connect(connectionUrl, connectionProperties)) {
boolean uppercase = connection.getMetaData().storesUpperCaseIdentifiers();
if (uppercase) {
schema = schema.toUpperCase(ENGLISH);
table = table.toUpperCase(ENGLISH);
}
String catalog = connection.getCatalog();
String temporaryName = "tmp_presto_" + UUID.randomUUID().toString().replace("-", "");
StringBuilder sql = new StringBuilder()
.append("CREATE TABLE ")
.append(quoted(catalog, schema, temporaryName))
.append(" (");
ImmutableList.Builder<String> columnNames = ImmutableList.builder();
ImmutableList.Builder<Type> columnTypes = ImmutableList.builder();
ImmutableList.Builder<String> columnList = ImmutableList.builder();
for (ColumnMetadata column : tableMetadata.getColumns()) {
String columnName = column.getName();
if (uppercase) {
columnName = columnName.toUpperCase(ENGLISH);
}
columnNames.add(columnName);
columnTypes.add(column.getType());
columnList.add(new StringBuilder()
.append(quoted(columnName))
.append(" ")
.append(toSqlType(column.getType()))
.toString());
}
Joiner.on(", ").appendTo(sql, columnList.build());
sql.append(")");
execute(connection, sql.toString());
return new JdbcOutputTableHandle(
connectorId,
catalog,
schema,
table,
columnNames.build(),
columnTypes.build(),
tableMetadata.getOwner(),
temporaryName,
connectionUrl,
fromProperties(connectionProperties));
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void commitCreateTable(JdbcOutputTableHandle handle, Collection<Slice> fragments)
{
StringBuilder sql = new StringBuilder()
.append("ALTER TABLE ")
.append(quoted(handle.getCatalogName(), handle.getSchemaName(), handle.getTemporaryTableName()))
.append(" RENAME TO ")
.append(quoted(handle.getCatalogName(), handle.getSchemaName(), handle.getTableName()));
try (Connection connection = getConnection(handle)) {
execute(connection, sql.toString());
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void dropTable(JdbcTableHandle handle)
{
StringBuilder sql = new StringBuilder()
.append("DROP TABLE ")
.append(quoted(handle.getCatalogName(), handle.getSchemaName(), handle.getTableName()));
try (Connection connection = driver.connect(connectionUrl, connectionProperties)) {
execute(connection, sql.toString());
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public String buildInsertSql(JdbcOutputTableHandle handle)
{
String vars = Joiner.on(',').join(nCopies(handle.getColumnNames().size(), "?"));
return new StringBuilder()
.append("INSERT INTO ")
.append(quoted(handle.getCatalogName(), handle.getSchemaName(), handle.getTemporaryTableName()))
.append(" VALUES (").append(vars).append(")")
.toString();
}
@Override
public Connection getConnection(JdbcOutputTableHandle handle)
throws SQLException
{
return driver.connect(handle.getConnectionUrl(), toProperties(handle.getConnectionProperties()));
}
protected ResultSet getTables(Connection connection, String schemaName, String tableName)
throws SQLException
{
return connection.getMetaData().getTables(connection.getCatalog(), schemaName, tableName, new String[] {"TABLE", "VIEW"});
}
protected SchemaTableName getSchemaTableName(ResultSet resultSet)
throws SQLException
{
return new SchemaTableName(
resultSet.getString("TABLE_SCHEM").toLowerCase(ENGLISH),
resultSet.getString("TABLE_NAME").toLowerCase(ENGLISH));
}
protected void execute(Connection connection, String query)
throws SQLException
{
try (Statement statement = connection.createStatement()) {
log.debug("Execute: %s", query);
statement.execute(query);
}
}
protected Type toPrestoType(int jdbcType)
{
switch (jdbcType) {
case Types.BIT:
case Types.BOOLEAN:
return BOOLEAN;
case Types.TINYINT:
case Types.SMALLINT:
case Types.INTEGER:
case Types.BIGINT:
return BIGINT;
case Types.FLOAT:
case Types.REAL:
case Types.DOUBLE:
case Types.NUMERIC:
case Types.DECIMAL:
return DOUBLE;
case Types.CHAR:
case Types.NCHAR:
case Types.VARCHAR:
case Types.NVARCHAR:
case Types.LONGVARCHAR:
case Types.LONGNVARCHAR:
return VARCHAR;
case Types.BINARY:
case Types.VARBINARY:
case Types.LONGVARBINARY:
return VARBINARY;
case Types.DATE:
return DATE;
case Types.TIME:
return TIME;
case Types.TIMESTAMP:
return TIMESTAMP;
}
return null;
}
protected String toSqlType(Type type)
{
String sqlType = SQL_TYPES.get(type);
if (sqlType != null) {
return sqlType;
}
throw new PrestoException(NOT_SUPPORTED, "Unsuported column type: " + type.getTypeSignature());
}
protected String quoted(String name)
{
name = name.replace(identifierQuote, identifierQuote + identifierQuote);
return identifierQuote + name + identifierQuote;
}
protected String quoted(String catalog, String schema, String table)
{
StringBuilder sb = new StringBuilder();
if (!isNullOrEmpty(catalog)) {
sb.append(quoted(catalog)).append(".");
}
if (!isNullOrEmpty(schema)) {
sb.append(quoted(schema)).append(".");
}
sb.append(quoted(table));
return sb.toString();
}
private static Properties toProperties(Map<String, String> map)
{
Properties properties = new Properties();
for (Map.Entry<String, String> entry : map.entrySet()) {
properties.setProperty(entry.getKey(), entry.getValue());
}
return properties;
}
public synchronized List<JdbcJavaBean> getTableDataSet(JdbcCacheSplit key)
{
return jdbcResultCache.getResult(key);
}
public boolean isCacheTable(String tableName)
{
return cacheEnable && jdbcResultCache != null && jdbcResultCache.isCacheTable(tableName);
}
public void commitPdboLogs(JdbcSplit split, long rowCount)
{
this.subTableManager.commitPdboLogs(split, rowCount);
}
public int getDb_type()
{
return dbType;
}
}
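BaseJdbcClient leaves the JDBC Driver, the identifier quote character, and the dbType dialect flag to concrete subclasses. Below is a minimal sketch of such a subclass; the MySqlClient name and its wiring are assumptions for illustration, while the constructor signature, the protected dbType field, and the TYPE_MYSQL constant come from the class above, and the driver is the provided mysql-connector-java dependency declared in the module pom.

package com.facebook.presto.plugin.jdbc;

import com.facebook.presto.plugin.jdbc.cache.JdbcCacheConfig;
import com.facebook.presto.plugin.jdbc.subtable.JdbcSubTableConfig;

import javax.inject.Inject;

import java.sql.SQLException;

// Hypothetical driver-specific client: supplies the MySQL Driver, uses backticks as the
// identifier quote, and marks the SQL dialect via the protected dbType field.
public class MySqlClient
        extends BaseJdbcClient
{
    @Inject
    public MySqlClient(JdbcConnectorId connectorId, BaseJdbcConfig config,
            JdbcSubTableConfig subTableConfig, JdbcCacheConfig cacheConfig)
            throws SQLException
    {
        super(connectorId, config, "`", new com.mysql.jdbc.Driver(), subTableConfig, cacheConfig);
        // Constant defined in BaseJdbcClient; QueryBuilder uses it to pick the dialect.
        this.dbType = TYPE_MYSQL;
    }
}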

@ -0,0 +1,62 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import io.airlift.configuration.Config;
import javax.validation.constraints.NotNull;
public class BaseJdbcConfig
{
private String connectionUrl;
private String connectionUser;
private String connectionPassword;
@NotNull
public String getConnectionUrl()
{
return connectionUrl;
}
@Config("connection-url")
public BaseJdbcConfig setConnectionUrl(String connectionUrl)
{
this.connectionUrl = connectionUrl;
return this;
}
public String getConnectionUser()
{
return connectionUser;
}
@Config("connection-user")
public BaseJdbcConfig setConnectionUser(String connectionUser)
{
this.connectionUser = connectionUser;
return this;
}
public String getConnectionPassword()
{
return connectionPassword;
}
@Config("connection-password")
public BaseJdbcConfig setConnectionPassword(String connectionPassword)
{
this.connectionPassword = connectionPassword;
return this;
}
}
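The three @Config keys above are exactly what a catalog properties file supplies. An illustrative catalog file follows; the file name, connector name, and values are placeholders, and only the connection-* keys are defined by BaseJdbcConfig.

# etc/catalog/example.properties (hypothetical)
connector.name=example-jdbc
connection-url=jdbc:mysql://db-host:3306
connection-user=presto
connection-password=secret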

@ -0,0 +1,62 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorPartitionResult;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TupleDomain;
import io.airlift.slice.Slice;
import javax.annotation.Nullable;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
public interface JdbcClient
{
Set<String> getSchemaNames();
List<SchemaTableName> getTableNames(@Nullable String schema);
@Nullable
JdbcTableHandle getTableHandle(SchemaTableName schemaTableName);
List<JdbcColumnHandle> getColumns(JdbcTableHandle tableHandle);
ConnectorPartitionResult getPartitions(JdbcTableHandle jdbcTableHandle, TupleDomain<ColumnHandle> tupleDomain);
ConnectorSplitSource getPartitionSplits(JdbcPartition jdbcPartition);
Connection getConnection(JdbcSplit split)
throws SQLException;
String buildSql(JdbcSplit split, List<JdbcColumnHandle> columnHandles);
JdbcOutputTableHandle beginCreateTable(ConnectorTableMetadata tableMetadata);
void commitCreateTable(JdbcOutputTableHandle handle, Collection<Slice> fragments);
void dropTable(JdbcTableHandle jdbcTableHandle);
String buildInsertSql(JdbcOutputTableHandle handle);
Connection getConnection(JdbcOutputTableHandle handle)
throws SQLException;
}

@ -0,0 +1,97 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.type.Type;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkNotNull;
public final class JdbcColumnHandle
implements ColumnHandle
{
private final String connectorId;
private final String columnName;
private final Type columnType;
@JsonCreator
public JdbcColumnHandle(
@JsonProperty("connectorId") String connectorId,
@JsonProperty("columnName") String columnName,
@JsonProperty("columnType") Type columnType)
{
this.connectorId = checkNotNull(connectorId, "connectorId is null");
this.columnName = checkNotNull(columnName, "columnName is null");
this.columnType = checkNotNull(columnType, "columnType is null");
}
@JsonProperty
public String getConnectorId()
{
return connectorId;
}
@JsonProperty
public String getColumnName()
{
return columnName;
}
@JsonProperty
public Type getColumnType()
{
return columnType;
}
public ColumnMetadata getColumnMetadata()
{
return new ColumnMetadata(columnName, columnType, false);
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if ((obj == null) || (getClass() != obj.getClass())) {
return false;
}
JdbcColumnHandle o = (JdbcColumnHandle) obj;
return Objects.equals(this.connectorId, o.connectorId) &&
Objects.equals(this.columnName, o.columnName);
}
@Override
public int hashCode()
{
return Objects.hash(connectorId, columnName);
}
@Override
public String toString()
{
return toStringHelper(this)
.add("connectorId", connectorId)
.add("columnName", columnName)
.add("columnType", columnType)
.toString();
}
}

@ -0,0 +1,98 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.Connector;
import com.facebook.presto.spi.ConnectorHandleResolver;
import com.facebook.presto.spi.ConnectorMetadata;
import com.facebook.presto.spi.ConnectorRecordSetProvider;
import com.facebook.presto.spi.ConnectorRecordSinkProvider;
import com.facebook.presto.spi.ConnectorSplitManager;
import io.airlift.bootstrap.LifeCycleManager;
import io.airlift.log.Logger;
import javax.inject.Inject;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcConnector
implements Connector
{
private static final Logger log = Logger.get(JdbcConnector.class);
private final LifeCycleManager lifeCycleManager;
private final JdbcMetadata jdbcMetadata;
private final JdbcSplitManager jdbcSplitManager;
private final JdbcRecordSetProvider jdbcRecordSetProvider;
private final JdbcHandleResolver jdbcHandleResolver;
private final JdbcRecordSinkProvider jdbcRecordSinkProvider;
@Inject
public JdbcConnector(
LifeCycleManager lifeCycleManager,
JdbcMetadata jdbcMetadata,
JdbcSplitManager jdbcSplitManager,
JdbcRecordSetProvider jdbcRecordSetProvider,
JdbcHandleResolver jdbcHandleResolver,
JdbcRecordSinkProvider jdbcRecordSinkProvider)
{
this.lifeCycleManager = checkNotNull(lifeCycleManager, "lifeCycleManager is null");
this.jdbcMetadata = checkNotNull(jdbcMetadata, "jdbcMetadata is null");
this.jdbcSplitManager = checkNotNull(jdbcSplitManager, "jdbcSplitManager is null");
this.jdbcRecordSetProvider = checkNotNull(jdbcRecordSetProvider, "jdbcRecordSetProvider is null");
this.jdbcHandleResolver = checkNotNull(jdbcHandleResolver, "jdbcHandleResolver is null");
this.jdbcRecordSinkProvider = checkNotNull(jdbcRecordSinkProvider, "jdbcRecordSinkProvider is null");
}
@Override
public ConnectorMetadata getMetadata()
{
return jdbcMetadata;
}
@Override
public ConnectorSplitManager getSplitManager()
{
return jdbcSplitManager;
}
@Override
public ConnectorRecordSetProvider getRecordSetProvider()
{
return jdbcRecordSetProvider;
}
@Override
public ConnectorHandleResolver getHandleResolver()
{
return jdbcHandleResolver;
}
@Override
public ConnectorRecordSinkProvider getRecordSinkProvider()
{
return jdbcRecordSinkProvider;
}
@Override
public final void shutdown()
{
try {
lifeCycleManager.stop();
}
catch (Exception e) {
log.error(e, "Error shutting down connector");
}
}
}

@ -0,0 +1,76 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.Connector;
import com.facebook.presto.spi.ConnectorFactory;
import com.facebook.presto.spi.classloader.ThreadContextClassLoader;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Injector;
import com.google.inject.Module;
import io.airlift.bootstrap.Bootstrap;
import java.util.Map;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Strings.isNullOrEmpty;
public class JdbcConnectorFactory
implements ConnectorFactory
{
private final String name;
private final Module module;
private final Map<String, String> optionalConfig;
private final ClassLoader classLoader;
public JdbcConnectorFactory(String name, Module module, Map<String, String> optionalConfig, ClassLoader classLoader)
{
checkArgument(!isNullOrEmpty(name), "name is null or empty");
this.name = name;
this.module = checkNotNull(module, "module is null");
this.optionalConfig = ImmutableMap.copyOf(checkNotNull(optionalConfig, "optionalConfig is null"));
this.classLoader = checkNotNull(classLoader, "classLoader is null");
}
@Override
public String getName()
{
return name;
}
@Override
public Connector create(String connectorId, Map<String, String> requiredConfig)
{
checkNotNull(requiredConfig, "requiredConfig is null");
checkNotNull(optionalConfig, "optionalConfig is null");
try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
Bootstrap app = new Bootstrap(new JdbcModule(connectorId), module);
Injector injector = app
.strictConfig()
.doNotInitializeLogging()
.setRequiredConfigurationProperties(requiredConfig)
.setOptionalConfigurationProperties(optionalConfig)
.initialize();
return injector.getInstance(JdbcConnector.class);
}
catch (Exception e) {
throw Throwables.propagate(e);
}
}
}

@ -0,0 +1,53 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkNotNull;
public final class JdbcConnectorId
{
private final String id;
public JdbcConnectorId(String id)
{
this.id = checkNotNull(id, "id is null");
}
@Override
public String toString()
{
return id;
}
@Override
public int hashCode()
{
return Objects.hash(id);
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if ((obj == null) || (getClass() != obj.getClass())) {
return false;
}
JdbcConnectorId other = (JdbcConnectorId) obj;
return Objects.equals(this.id, other.id);
}
}

@ -0,0 +1,84 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorHandleResolver;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorTableHandle;
import javax.inject.Inject;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcHandleResolver
implements ConnectorHandleResolver
{
private final String connectorId;
@Inject
public JdbcHandleResolver(JdbcConnectorId clientId)
{
this.connectorId = checkNotNull(clientId, "clientId is null").toString();
}
@Override
public boolean canHandle(ConnectorTableHandle tableHandle)
{
return tableHandle instanceof JdbcTableHandle && ((JdbcTableHandle) tableHandle).getConnectorId().equals(connectorId);
}
@Override
public boolean canHandle(ColumnHandle columnHandle)
{
return columnHandle instanceof JdbcColumnHandle && ((JdbcColumnHandle) columnHandle).getConnectorId().equals(connectorId);
}
@Override
public boolean canHandle(ConnectorSplit split)
{
return split instanceof JdbcSplit && ((JdbcSplit) split).getConnectorId().equals(connectorId);
}
@Override
public boolean canHandle(ConnectorOutputTableHandle tableHandle)
{
return (tableHandle instanceof JdbcOutputTableHandle) && ((JdbcOutputTableHandle) tableHandle).getConnectorId().equals(connectorId);
}
@Override
public Class<? extends ConnectorTableHandle> getTableHandleClass()
{
return JdbcTableHandle.class;
}
@Override
public Class<? extends ColumnHandle> getColumnHandleClass()
{
return JdbcColumnHandle.class;
}
@Override
public Class<? extends ConnectorSplit> getSplitClass()
{
return JdbcSplit.class;
}
@Override
public Class<? extends ConnectorOutputTableHandle> getOutputTableHandleClass()
{
return JdbcOutputTableHandle.class;
}
}

@ -0,0 +1,209 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorInsertTableHandle;
import com.facebook.presto.spi.ConnectorMetadata;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.InsertOption;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.TableNotFoundException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
import javax.inject.Inject;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import static com.facebook.presto.plugin.jdbc.Types.checkType;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.StandardErrorCode.PERMISSION_DENIED;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcMetadata
implements ConnectorMetadata
{
private final JdbcClient jdbcClient;
private final boolean allowDropTable;
@Inject
public JdbcMetadata(JdbcConnectorId connectorId, JdbcClient jdbcClient, JdbcMetadataConfig config)
{
this.jdbcClient = checkNotNull(jdbcClient, "client is null");
checkNotNull(config, "config is null");
allowDropTable = config.isAllowDropTable();
}
@Override
public List<String> listSchemaNames(ConnectorSession session)
{
return ImmutableList.copyOf(jdbcClient.getSchemaNames());
}
@Override
public JdbcTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
{
return jdbcClient.getTableHandle(tableName);
}
@Override
public ConnectorTableMetadata getTableMetadata(ConnectorTableHandle table)
{
JdbcTableHandle handle = checkType(table, JdbcTableHandle.class, "tableHandle");
ImmutableList.Builder<ColumnMetadata> columnMetadata = ImmutableList.builder();
for (JdbcColumnHandle column : jdbcClient.getColumns(handle)) {
columnMetadata.add(column.getColumnMetadata());
}
return new ConnectorTableMetadata(handle.getSchemaTableName(), columnMetadata.build());
}
@Override
public List<SchemaTableName> listTables(ConnectorSession session, String schemaNameOrNull)
{
return jdbcClient.getTableNames(schemaNameOrNull);
}
@Override
public ColumnHandle getSampleWeightColumnHandle(ConnectorTableHandle tableHandle)
{
return null;
}
@Override
public Map<String, ColumnHandle> getColumnHandles(ConnectorTableHandle tableHandle)
{
JdbcTableHandle jdbcTableHandle = checkType(tableHandle, JdbcTableHandle.class, "tableHandle");
ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder();
for (JdbcColumnHandle column : jdbcClient.getColumns(jdbcTableHandle)) {
columnHandles.put(column.getColumnMetadata().getName(), column);
}
return columnHandles.build();
}
@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session, SchemaTablePrefix prefix)
{
ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> columns = ImmutableMap.builder();
for (SchemaTableName tableName : listTables(session, prefix.getSchemaName())) {
try {
JdbcTableHandle tableHandle = jdbcClient.getTableHandle(tableName);
if (tableHandle == null) {
continue;
}
columns.put(tableName, getTableMetadata(tableHandle).getColumns());
}
catch (TableNotFoundException e) {
// table disappeared during listing operation
}
}
return columns.build();
}
@Override
public ColumnMetadata getColumnMetadata(ConnectorTableHandle tableHandle, ColumnHandle columnHandle)
{
checkType(tableHandle, JdbcTableHandle.class, "tableHandle");
return checkType(columnHandle, JdbcColumnHandle.class, "columnHandle").getColumnMetadata();
}
@Override
public boolean canCreateSampledTables(ConnectorSession session)
{
return false;
}
@Override
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
{
throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating tables");
}
@Override
public void dropTable(ConnectorTableHandle tableHandle)
{
if (!allowDropTable) {
throw new PrestoException(PERMISSION_DENIED, "DROP TABLE is disabled in this catalog");
}
JdbcTableHandle handle = checkType(tableHandle, JdbcTableHandle.class, "tableHandle");
jdbcClient.dropTable(handle);
}
@Override
public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
{
return jdbcClient.beginCreateTable(tableMetadata);
}
@Override
public void commitCreateTable(ConnectorOutputTableHandle tableHandle, Collection<Slice> fragments)
{
JdbcOutputTableHandle handle = checkType(tableHandle, JdbcOutputTableHandle.class, "tableHandle");
jdbcClient.commitCreateTable(handle, fragments);
}
@Override
public void renameTable(ConnectorTableHandle tableHandle, SchemaTableName newTableName)
{
throw new PrestoException(NOT_SUPPORTED, "This connector does not support renaming tables");
}
@Override
public ConnectorInsertTableHandle beginInsert(ConnectorSession session, ConnectorTableHandle tableHandle, InsertOption insertOption)
{
throw new PrestoException(NOT_SUPPORTED, "This connector does not support inserts");
}
@Override
public void commitInsert(ConnectorInsertTableHandle insertHandle, Collection<Slice> fragments)
{
throw new UnsupportedOperationException();
}
@Override
public void createView(ConnectorSession session, SchemaTableName viewName, String viewData, boolean replace)
{
throw new PrestoException(NOT_SUPPORTED, "This connector does not support creating views");
}
@Override
public void dropView(ConnectorSession session, SchemaTableName viewName)
{
throw new PrestoException(NOT_SUPPORTED, "This connector does not support dropping views");
}
@Override
public List<SchemaTableName> listViews(ConnectorSession session, String schemaNameOrNull)
{
return ImmutableList.of();
}
@Override
public Map<SchemaTableName, String> getViews(ConnectorSession session, SchemaTablePrefix prefix)
{
return ImmutableMap.of();
}
}

@ -0,0 +1,35 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
public class JdbcMetadataConfig
{
private boolean allowDropTable;
public boolean isAllowDropTable()
{
return allowDropTable;
}
@Config("allow-drop-table")
@ConfigDescription("Allow connector to drop tables")
public JdbcMetadataConfig setAllowDropTable(boolean allowDropTable)
{
this.allowDropTable = allowDropTable;
return this;
}
}
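JdbcMetadata.dropTable (earlier in this diff) refuses DROP TABLE unless this flag is set, so enabling it is a one-line addition to the same illustrative catalog properties file shown after BaseJdbcConfig:

allow-drop-table=true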

@ -0,0 +1,45 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.Scopes;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.configuration.ConfigBinder.configBinder;
public class JdbcModule
implements Module
{
private final String connectorId;
public JdbcModule(String connectorId)
{
this.connectorId = checkNotNull(connectorId, "connector id is null");
}
@Override
public void configure(Binder binder)
{
binder.bind(JdbcConnectorId.class).toInstance(new JdbcConnectorId(connectorId));
binder.bind(JdbcMetadata.class).in(Scopes.SINGLETON);
binder.bind(JdbcSplitManager.class).in(Scopes.SINGLETON);
binder.bind(JdbcRecordSetProvider.class).in(Scopes.SINGLETON);
binder.bind(JdbcHandleResolver.class).in(Scopes.SINGLETON);
binder.bind(JdbcRecordSinkProvider.class).in(Scopes.SINGLETON);
binder.bind(JdbcConnector.class).in(Scopes.SINGLETON);
configBinder(binder).bindConfig(JdbcMetadataConfig.class);
}
}
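JdbcModule binds the connector machinery but deliberately not JdbcClient or the connection configuration; those come from the extra Module passed to JdbcConnectorFactory. A minimal sketch of that module follows, reusing the hypothetical MySqlClient from the BaseJdbcClient sketch and assuming JdbcSubTableConfig and JdbcCacheConfig are airlift @Config classes like BaseJdbcConfig.

package com.facebook.presto.plugin.jdbc;

import com.facebook.presto.plugin.jdbc.cache.JdbcCacheConfig;
import com.facebook.presto.plugin.jdbc.subtable.JdbcSubTableConfig;
import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.Scopes;

import static io.airlift.configuration.ConfigBinder.configBinder;

// Hypothetical driver-specific module: provides the JdbcClient binding and the
// connection configuration that JdbcModule itself leaves open.
public class MySqlClientModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        binder.bind(JdbcClient.class).to(MySqlClient.class).in(Scopes.SINGLETON);
        configBinder(binder).bindConfig(BaseJdbcConfig.class);
        // Assumed to be airlift config classes; they back the sketched MySqlClient constructor.
        configBinder(binder).bindConfig(JdbcSubTableConfig.class);
        configBinder(binder).bindConfig(JdbcCacheConfig.class);
    }
}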

@ -0,0 +1,181 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.type.Type;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.String.format;
public class JdbcOutputTableHandle
implements ConnectorOutputTableHandle
{
private final String connectorId;
private final String catalogName;
private final String schemaName;
private final String tableName;
private final List<String> columnNames;
private final List<Type> columnTypes;
private final String tableOwner;
private final String temporaryTableName;
private final String connectionUrl;
private final Map<String, String> connectionProperties;
@JsonCreator
public JdbcOutputTableHandle(
@JsonProperty("connectorId") String connectorId,
@JsonProperty("catalogName") @Nullable String catalogName,
@JsonProperty("schemaName") @Nullable String schemaName,
@JsonProperty("tableName") String tableName,
@JsonProperty("columnNames") List<String> columnNames,
@JsonProperty("columnTypes") List<Type> columnTypes,
@JsonProperty("tableOwner") String tableOwner,
@JsonProperty("temporaryTableName") String temporaryTableName,
@JsonProperty("connectionUrl") String connectionUrl,
@JsonProperty("connectionProperties") Map<String, String> connectionProperties)
{
this.connectorId = checkNotNull(connectorId, "connectorId is null");
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = checkNotNull(tableName, "tableName is null");
this.tableOwner = checkNotNull(tableOwner, "tableOwner is null");
this.temporaryTableName = checkNotNull(temporaryTableName, "temporaryTableName is null");
this.connectionUrl = checkNotNull(connectionUrl, "connectionUrl is null");
this.connectionProperties = ImmutableMap.copyOf(checkNotNull(connectionProperties, "connectionProperties is null"));
checkNotNull(columnNames, "columnNames is null");
checkNotNull(columnTypes, "columnTypes is null");
checkArgument(columnNames.size() == columnTypes.size(), "columnNames and columnTypes sizes don't match");
this.columnNames = ImmutableList.copyOf(columnNames);
this.columnTypes = ImmutableList.copyOf(columnTypes);
}
@JsonProperty
public String getConnectorId()
{
return connectorId;
}
@JsonProperty
@Nullable
public String getCatalogName()
{
return catalogName;
}
@JsonProperty
@Nullable
public String getSchemaName()
{
return schemaName;
}
@JsonProperty
public String getTableName()
{
return tableName;
}
@JsonProperty
public List<String> getColumnNames()
{
return columnNames;
}
@JsonProperty
public List<Type> getColumnTypes()
{
return columnTypes;
}
@JsonProperty
public String getTableOwner()
{
return tableOwner;
}
@JsonProperty
public String getTemporaryTableName()
{
return temporaryTableName;
}
@JsonProperty
public String getConnectionUrl()
{
return connectionUrl;
}
@JsonProperty
public Map<String, String> getConnectionProperties()
{
return connectionProperties;
}
@Override
public String toString()
{
return format("jdbc:%s.%s.%s", catalogName, schemaName, tableName);
}
@Override
public int hashCode()
{
return Objects.hash(
connectorId,
catalogName,
schemaName,
tableName,
columnNames,
columnTypes,
tableOwner,
temporaryTableName,
connectionUrl,
connectionProperties);
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
JdbcOutputTableHandle other = (JdbcOutputTableHandle) obj;
return Objects.equals(this.connectorId, other.connectorId) &&
Objects.equals(this.catalogName, other.catalogName) &&
Objects.equals(this.schemaName, other.schemaName) &&
Objects.equals(this.tableName, other.tableName) &&
Objects.equals(this.columnNames, other.columnNames) &&
Objects.equals(this.columnTypes, other.columnTypes) &&
Objects.equals(this.tableOwner, other.tableOwner) &&
Objects.equals(this.temporaryTableName, other.temporaryTableName) &&
Objects.equals(this.connectionUrl, other.connectionUrl) &&
Objects.equals(this.connectionProperties, other.connectionProperties);
}
}

@ -0,0 +1,59 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorPartition;
import com.facebook.presto.spi.TupleDomain;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcPartition
implements ConnectorPartition
{
private final JdbcTableHandle jdbcTableHandle;
private final TupleDomain<ColumnHandle> domain;
public JdbcPartition(JdbcTableHandle jdbcTableHandle, TupleDomain<ColumnHandle> domain)
{
this.jdbcTableHandle = checkNotNull(jdbcTableHandle, "jdbcTableHandle is null");
this.domain = checkNotNull(domain, "domain is null");
}
@Override
public String getPartitionId()
{
return jdbcTableHandle.toString();
}
public JdbcTableHandle getJdbcTableHandle()
{
return jdbcTableHandle;
}
@Override
public TupleDomain<ColumnHandle> getTupleDomain()
{
return domain;
}
@Override
public String toString()
{
return toStringHelper(this)
.add("jdbcTableHandle", jdbcTableHandle)
.toString();
}
}

@ -0,0 +1,63 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ConnectorFactory;
import com.facebook.presto.spi.Plugin;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Module;
import java.util.List;
import java.util.Map;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Strings.isNullOrEmpty;
public class JdbcPlugin
implements Plugin
{
private final String name;
private final Module module;
private Map<String, String> optionalConfig = ImmutableMap.of();
public JdbcPlugin(String name, Module module)
{
checkArgument(!isNullOrEmpty(name), "name is null or empty");
this.name = name;
this.module = checkNotNull(module, "module is null");
}
@Override
public void setOptionalConfig(Map<String, String> optionalConfig)
{
this.optionalConfig = ImmutableMap.copyOf(checkNotNull(optionalConfig, "optionalConfig is null"));
}
@Override
public <T> List<T> getServices(Class<T> type)
{
if (type == ConnectorFactory.class) {
return ImmutableList.of(type.cast(new JdbcConnectorFactory(name, module, optionalConfig, getClassLoader())));
}
return ImmutableList.of();
}
private static ClassLoader getClassLoader()
{
return firstNonNull(Thread.currentThread().getContextClassLoader(), JdbcPlugin.class.getClassLoader());
}
}
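JdbcPlugin is the piece a concrete connector registers as its Plugin implementation; the presto-maven-plugin's generate-service-descriptor goal in the root pom writes the service descriptor for it. The sketch below ties the earlier illustrations together, again with hypothetical MySql* names.

package com.facebook.presto.plugin.jdbc;

// Hypothetical concrete plugin: the name is what the engine matches against connector.name
// in the catalog file, and the module supplies the JdbcClient binding.
public class MySqlPlugin
        extends JdbcPlugin
{
    public MySqlPlugin()
    {
        super("mysql", new MySqlClientModule());
    }
}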

@ -0,0 +1,362 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.plugin.jdbc.cache.JdbcCacheSplit;
import com.facebook.presto.plugin.jdbc.cache.JdbcJavaBean;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.TimeType;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarbinaryType;
import com.facebook.presto.spi.type.VarcharType;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import org.joda.time.chrono.ISOChronology;
import java.sql.Connection;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import static com.facebook.presto.spi.StandardErrorCode.INTERNAL_ERROR;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedBuffer;
import static org.joda.time.DateTimeZone.UTC;
public class JdbcRecordCursor
implements RecordCursor
{
private static final Logger log = Logger.get(JdbcRecordCursor.class);
private static final ISOChronology UTC_CHRONOLOGY = ISOChronology.getInstance(UTC);
private final List<JdbcColumnHandle> columnHandles;
private Connection connection;
private Statement statement;
private ResultSet resultSet;
private boolean closed;
private List<JdbcJavaBean> tableDataSet;
private boolean isCacheTable = false;
private AtomicLong rowRecord = new AtomicLong(0);
private BaseJdbcClient client;
private JdbcSplit split;
public JdbcRecordCursor(JdbcClient jdbcClient, JdbcSplit split, List<JdbcColumnHandle> columnHandles)
{
this.client = (BaseJdbcClient) jdbcClient;
this.split = split;
isCacheTable = client.isCacheTable(split.getBaseTableName());
this.columnHandles = ImmutableList.copyOf(checkNotNull(columnHandles, "columnHandles is null"));
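// Cached tables are read from the data set held by the client; all other tables run a per-split JDBC query below.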
if (isCacheTable) {
JdbcCacheSplit key = new JdbcCacheSplit(split.getConnectorId(), split.getCatalogName(),
split.getSchemaName(), split.getTableName(), split.getConnectionUrl(), split.getBaseTableName());
tableDataSet = client.getTableDataSet(key);
}
else {
String sql = jdbcClient.buildSql(split, columnHandles);
try {
connection = jdbcClient.getConnection(split);
statement = connection.createStatement();
statement.setFetchSize(1000);
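// Append the split predicate: a LIMIT fragment is appended verbatim, any other fragment is merged into the WHERE clause.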
String whereCondition = split.getSplitPart();
if (!isNullOrEmpty(whereCondition)) {
if (whereCondition.contains("LIMIT")) {
sql += whereCondition;
}
else if (sql.contains("WHERE")) {
sql += " AND " + whereCondition;
}
else {
sql += " WHERE " + whereCondition;
}
}
long startTime = System.currentTimeMillis();
log.info("JdbcRecordCursor Executing: %s ", sql);
resultSet = statement.executeQuery(sql);
log.debug("The connection url: %s ,JdbcRecordCursor Executing: %s ,spend time : %s", split.getConnectionUrl(), sql, (System.currentTimeMillis() - startTime));
}
catch (SQLException e) {
log.error("Execute sql [%s] error, connection url : %s", sql, split.getConnectionUrl());
throw handleSqlException(e);
}
}
}
@Override
public long getReadTimeNanos()
{
return 0;
}
@Override
public long getTotalBytes()
{
return 0;
}
@Override
public long getCompletedBytes()
{
return 0;
}
@Override
public Type getType(int field)
{
return columnHandles.get(field).getColumnType();
}
@Override
public boolean advanceNextPosition()
{
if (closed) {
return false;
}
boolean result;
if (isCacheTable) {
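// For cached tables, advance an in-memory row counter instead of a JDBC ResultSet.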
long rowNumber = rowRecord.incrementAndGet();
result = rowNumber <= tableDataSet.size();
}
else {
try {
result = resultSet.next();
if (result) {
rowRecord.getAndIncrement();
}
}
catch (SQLException e) {
throw handleSqlException(e);
}
}
if (!result) {
close();
}
return result;
}
@Override
public boolean getBoolean(int field)
{
checkState(!closed, "cursor is closed");
if (isCacheTable) {
return (boolean) getFieldValue(field);
}
else {
try {
return resultSet.getBoolean(field + 1);
}
catch (SQLException e) {
throw handleSqlException(e);
}
}
}
@Override
public long getLong(int field)
{
checkState(!closed, "cursor is closed");
try {
Type type = getType(field);
if (type.equals(BigintType.BIGINT)) {
if (isCacheTable) {
return (long) getFieldValue(field);
}
else {
return resultSet.getLong(field + 1);
}
}
if (type.equals(DateType.DATE)) {
Date date = null;
if (isCacheTable) {
date = (Date) getFieldValue(field);
}
else {
date = resultSet.getDate(field + 1);
}
// JDBC returns a date using a timestamp at midnight in the JVM timezone
long localMillis = date.getTime();
// Convert it to a midnight in UTC
long utcMillis = ISOChronology.getInstance().getZone().getMillisKeepLocal(UTC, localMillis);
// convert to days
return TimeUnit.MILLISECONDS.toDays(utcMillis);
}
if (type.equals(TimeType.TIME)) {
Time time = null;
if (isCacheTable) {
time = (Time) getFieldValue(field);
}
else {
time = resultSet.getTime(field + 1);
}
return UTC_CHRONOLOGY.millisOfDay().get(time.getTime());
}
if (type.equals(TimestampType.TIMESTAMP)) {
Timestamp timestamp = null;
if (isCacheTable) {
timestamp = (Timestamp) getFieldValue(field);
}
else {
timestamp = resultSet.getTimestamp(field + 1);
}
return timestamp.getTime();
}
throw new PrestoException(INTERNAL_ERROR, "Unhandled type for long: " + type.getTypeSignature());
}
catch (SQLException e) {
throw handleSqlException(e);
}
}
@Override
public double getDouble(int field)
{
checkState(!closed, "cursor is closed");
if (isCacheTable) {
return (double) getFieldValue(field);
}
else {
try {
return resultSet.getDouble(field + 1);
}
catch (SQLException e) {
throw handleSqlException(e);
}
}
}
@Override
public Slice getSlice(int field)
{
checkState(!closed, "cursor is closed");
try {
Type type = getType(field);
if (type.equals(VarcharType.VARCHAR)) {
String str = null;
if (isCacheTable) {
str = (String) getFieldValue(field);
}
else {
str = resultSet.getString(field + 1);
}
return utf8Slice(str);
}
if (type.equals(VarbinaryType.VARBINARY)) {
byte[] bytes = null;
if (isCacheTable) {
bytes = (byte[]) getFieldValue(field);
}
else {
bytes = resultSet.getBytes(field + 1);
}
return wrappedBuffer(bytes);
}
throw new PrestoException(INTERNAL_ERROR, "Unhandled type for slice: " + type.getTypeSignature());
}
catch (SQLException e) {
throw handleSqlException(e);
}
}
@Override
public boolean isNull(int field)
{
checkState(!closed, "cursor is closed");
checkArgument(field < columnHandles.size(), "Invalid field index");
try {
if (isCacheTable) {
Object fieldValue = getFieldValue(field);
return fieldValue == null;
}
// JDBC is kind of dumb: we need to read the field and then ask
// if it was null, which means we are wasting effort here.
// We could save the result of the field access if it matters.
resultSet.getObject(field + 1);
return resultSet.wasNull();
}
catch (SQLException e) {
throw handleSqlException(e);
}
}
@SuppressWarnings({"UnusedDeclaration", "EmptyTryBlock"})
@Override
public void close()
{
if (closed) {
return;
}
if (!isNullOrEmpty(split.getSplitField()) && split.isCalcStepEnable()) {
client.commitPdboLogs(split, rowRecord.get());
}
closed = true;
try {
if (statement != null) {
statement.cancel();
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
// use try with resources to close everything properly
try (ResultSet resultSet = this.resultSet;
Statement statement = this.statement;
Connection connection = this.connection) {
// do nothing
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
private Object getFieldValue(int field)
{
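// Resolve the current row of the cached data set and look up the value by lower-cased column name.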
String lowerCase = columnHandles.get(field).getColumnName().toLowerCase();
JdbcJavaBean jdbcJavaBean = tableDataSet.get(rowRecord.intValue() - 1);
return jdbcJavaBean.getFieldObjectValue(jdbcJavaBean.getColumns().indexOf(lowerCase));
}
private RuntimeException handleSqlException(SQLException e)
{
try {
close();
}
catch (Exception closeException) {
e.addSuppressed(closeException);
}
return Throwables.propagate(e);
}
}

@ -0,0 +1,58 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcRecordSet
implements RecordSet
{
private final JdbcClient jdbcClient;
private final List<JdbcColumnHandle> columnHandles;
private final List<Type> columnTypes;
private final JdbcSplit split;
public JdbcRecordSet(JdbcClient jdbcClient, JdbcSplit split, List<JdbcColumnHandle> columnHandles)
{
this.jdbcClient = checkNotNull(jdbcClient, "jdbcClient is null");
this.split = checkNotNull(split, "split is null");
this.columnHandles = checkNotNull(columnHandles, "column handles is null");
ImmutableList.Builder<Type> types = ImmutableList.builder();
for (JdbcColumnHandle column : columnHandles) {
types.add(column.getColumnType());
}
this.columnTypes = types.build();
}
@Override
public List<Type> getColumnTypes()
{
return columnTypes;
}
@Override
public RecordCursor cursor()
{
return new JdbcRecordCursor(jdbcClient, split, columnHandles);
}
}

@ -0,0 +1,52 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorRecordSetProvider;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.RecordSet;
import com.google.common.collect.ImmutableList;
import javax.inject.Inject;
import java.util.List;
import static com.facebook.presto.plugin.jdbc.Types.checkType;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcRecordSetProvider
implements ConnectorRecordSetProvider
{
private final JdbcClient jdbcClient;
@Inject
public JdbcRecordSetProvider(JdbcClient jdbcClient)
{
this.jdbcClient = checkNotNull(jdbcClient, "jdbcClient is null");
}
@Override
public RecordSet getRecordSet(ConnectorSplit split, List<? extends ColumnHandle> columns)
{
JdbcSplit jdbcSplit = checkType(split, JdbcSplit.class, "split");
ImmutableList.Builder<JdbcColumnHandle> handles = ImmutableList.builder();
for (ColumnHandle handle : columns) {
handles.add(checkType(handle, JdbcColumnHandle.class, "columnHandle"));
}
return new JdbcRecordSet(jdbcClient, jdbcSplit, handles.build());
}
}

@ -0,0 +1,205 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.RecordSink;
import com.facebook.presto.spi.type.Type;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8;
public class JdbcRecordSink
implements RecordSink
{
private final Connection connection;
private final PreparedStatement statement;
private final int fieldCount;
private final List<Type> columnTypes;
private int field = -1;
private int batchSize;
public JdbcRecordSink(JdbcOutputTableHandle handle, JdbcClient jdbcClient)
{
try {
connection = jdbcClient.getConnection(handle);
connection.setAutoCommit(false);
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
try {
statement = connection.prepareStatement(jdbcClient.buildInsertSql(handle));
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
fieldCount = handle.getColumnNames().size();
columnTypes = handle.getColumnTypes();
}
@Override
public void beginRecord(long sampleWeight)
{
checkState(field == -1, "already in record");
field = 0;
}
@Override
public void finishRecord()
{
checkState(field != -1, "not in record");
checkState(field == fieldCount, "not all fields set");
field = -1;
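// Buffer the row into the JDBC batch; flush and commit once 1000 rows have accumulated.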
try {
statement.addBatch();
batchSize++;
if (batchSize >= 1000) {
statement.executeBatch();
connection.commit();
connection.setAutoCommit(false);
batchSize = 0;
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void appendNull()
{
try {
statement.setObject(next(), null);
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void appendBoolean(boolean value)
{
try {
statement.setBoolean(next(), value);
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void appendLong(long value)
{
try {
if (DATE.equals(columnTypes.get(field))) {
// convert to midnight in default time zone
long utcMillis = TimeUnit.DAYS.toMillis(value);
long localMillis = ISOChronology.getInstanceUTC().getZone().getMillisKeepLocal(DateTimeZone.getDefault(), utcMillis);
statement.setDate(next(), new Date(localMillis));
}
else {
statement.setLong(next(), value);
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void appendDouble(double value)
{
try {
statement.setDouble(next(), value);
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public void appendString(byte[] value)
{
try {
statement.setString(next(), new String(value, UTF_8));
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public Collection<Slice> commit()
{
// commit and close
try (Connection connection = this.connection) {
if (batchSize > 0) {
statement.executeBatch();
connection.commit();
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
// the committer does not need any additional info
return ImmutableList.of();
}
@SuppressWarnings("UnusedDeclaration")
@Override
public void rollback()
{
// rollback and close
try (Connection connection = this.connection;
PreparedStatement statement = this.statement) {
connection.rollback();
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
}
@Override
public List<Type> getColumnTypes()
{
return columnTypes;
}
private int next()
{
checkState(field != -1, "not in record");
checkState(field < fieldCount, "all fields already set");
field++;
return field;
}
}

@ -0,0 +1,48 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ConnectorInsertTableHandle;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorRecordSinkProvider;
import com.facebook.presto.spi.RecordSink;
import javax.inject.Inject;
import static com.facebook.presto.plugin.jdbc.Types.checkType;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcRecordSinkProvider
implements ConnectorRecordSinkProvider
{
private final JdbcClient jdbcClient;
@Inject
public JdbcRecordSinkProvider(JdbcClient jdbcClient)
{
this.jdbcClient = checkNotNull(jdbcClient, "jdbcClient is null");
}
@Override
public RecordSink getRecordSink(ConnectorOutputTableHandle tableHandle)
{
return new JdbcRecordSink(checkType(tableHandle, JdbcOutputTableHandle.class, "tableHandle"), jdbcClient);
}
@Override
public RecordSink getRecordSink(ConnectorInsertTableHandle tableHandle)
{
throw new UnsupportedOperationException();
}
}

@ -0,0 +1,211 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.TupleDomain;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcSplit
implements ConnectorSplit
{
private final String connectorId;
private final String catalogName;
private final String schemaName;
private final String tableName;
private final String connectionUrl;
private final Map<String, String> connectionProperties;
private final TupleDomain<ColumnHandle> tupleDomain;
private final String splitPart;
private final List<HostAddress> addresses;
private final boolean remotelyAccessible;
private final String baseTableName;
private final String splitField;
private final String beginIndex;
private final String endIndex;
private final long timeStamp;
private final int scanNodes;
private final boolean isCalcStepEnable;
private final String dbHost;
@JsonCreator
public JdbcSplit(
@JsonProperty("connectorId") String connectorId,
@JsonProperty("catalogName") @Nullable String catalogName,
@JsonProperty("schemaName") @Nullable String schemaName,
@JsonProperty("tableName") String tableName,
@JsonProperty("connectionUrl") String connectionUrl,
@JsonProperty("connectionProperties") Map<String, String> connectionProperties,
@JsonProperty("tupleDomain") TupleDomain<ColumnHandle> tupleDomain,
@JsonProperty("splitPart") String splitPart,
@JsonProperty("addresses") List<HostAddress> addresses,
@JsonProperty("remotelyAccessible") boolean remotelyAccessible,
@JsonProperty("baseTableName") String baseTableName,
@JsonProperty("splitField") String splitField,
@JsonProperty("beginIndex") String beginIndex,
@JsonProperty("endIndex") String endIndex,
@JsonProperty("timeStamp") long timeStamp,
@JsonProperty("scanNodes") int scanNodes,
@JsonProperty("isCalcStepEnable") boolean isCalcStepEnable,
@JsonProperty("dbHost") String dbHost)
{
this.connectorId = checkNotNull(connectorId, "connector id is null");
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = checkNotNull(tableName, "table name is null");
this.connectionUrl = checkNotNull(connectionUrl, "connectionUrl is null");
this.connectionProperties = ImmutableMap.copyOf(checkNotNull(connectionProperties, "connectionProperties is null"));
this.tupleDomain = checkNotNull(tupleDomain, "tupleDomain is null");
this.splitPart = splitPart;
this.remotelyAccessible = remotelyAccessible;
this.addresses = checkNotNull(addresses, "host addresses is null");
this.baseTableName = baseTableName;
this.splitField = splitField;
this.beginIndex = beginIndex;
this.endIndex = endIndex;
this.timeStamp = timeStamp;
this.scanNodes = scanNodes;
this.isCalcStepEnable = isCalcStepEnable;
this.dbHost = dbHost;
}
@JsonProperty
public String getConnectorId()
{
return connectorId;
}
@JsonProperty
@Nullable
public String getCatalogName()
{
return catalogName;
}
@JsonProperty
@Nullable
public String getSchemaName()
{
return schemaName;
}
@JsonProperty
public String getTableName()
{
return tableName;
}
@JsonProperty
public String getConnectionUrl()
{
return connectionUrl;
}
@JsonProperty
public Map<String, String> getConnectionProperties()
{
return connectionProperties;
}
@JsonProperty
public TupleDomain<ColumnHandle> getTupleDomain()
{
return tupleDomain;
}
@JsonProperty
@Override
public boolean isRemotelyAccessible()
{
return remotelyAccessible;
}
@JsonProperty
@Override
public List<HostAddress> getAddresses()
{
return addresses;
}
@Override
public Object getInfo()
{
return this;
}
@JsonProperty
public String getSplitPart()
{
return splitPart;
}
@JsonProperty
public String getBaseTableName()
{
return baseTableName;
}
@JsonProperty
public String getSplitField()
{
return splitField;
}
@JsonProperty
public String getBeginIndex()
{
return beginIndex;
}
@JsonProperty
public String getEndIndex()
{
return endIndex;
}
@JsonProperty
public long getTimeStamp()
{
return timeStamp;
}
@JsonProperty
public int getScanNodes()
{
return scanNodes;
}
@JsonProperty
public boolean isCalcStepEnable()
{
return isCalcStepEnable;
}
@JsonProperty
public String getDbHost()
{
return dbHost;
}
}

@ -0,0 +1,67 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorPartition;
import com.facebook.presto.spi.ConnectorPartitionResult;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitManager;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.FixedSplitSource;
import com.facebook.presto.spi.TupleDomain;
import com.google.common.collect.ImmutableList;
import javax.inject.Inject;
import java.util.List;
import static com.facebook.presto.plugin.jdbc.Types.checkType;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcSplitManager
implements ConnectorSplitManager
{
private final String connectorId;
private final JdbcClient jdbcClient;
@Inject
public JdbcSplitManager(JdbcConnectorId connectorId, JdbcClient jdbcClient)
{
this.connectorId = checkNotNull(connectorId, "connectorId is null").toString();
this.jdbcClient = checkNotNull(jdbcClient, "client is null");
}
@Override
public ConnectorPartitionResult getPartitions(ConnectorTableHandle tableHandle, TupleDomain<ColumnHandle> tupleDomain)
{
JdbcTableHandle handle = checkType(tableHandle, JdbcTableHandle.class, "tableHandle");
return jdbcClient.getPartitions(handle, tupleDomain);
}
@Override
public ConnectorSplitSource getPartitionSplits(ConnectorTableHandle tableHandle, List<ConnectorPartition> partitions)
{
if (partitions.isEmpty()) {
return new FixedSplitSource(connectorId, ImmutableList.<ConnectorSplit>of());
}
checkArgument(partitions.size() == 1, "Expected one partition but got %s", partitions.size());
JdbcPartition partition = checkType(partitions.get(0), JdbcPartition.class, "partition");
return jdbcClient.getPartitionSplits(partition);
}
}

@ -0,0 +1,109 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.SchemaTableName;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Joiner;
import javax.annotation.Nullable;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkNotNull;
public final class JdbcTableHandle
implements ConnectorTableHandle
{
private final String connectorId;
private final SchemaTableName schemaTableName;
private final String catalogName;
private final String schemaName;
private final String tableName;
@JsonCreator
public JdbcTableHandle(
@JsonProperty("connectorId") String connectorId,
@JsonProperty("schemaTableName") SchemaTableName schemaTableName,
@JsonProperty("catalogName") @Nullable String catalogName,
@JsonProperty("schemaName") @Nullable String schemaName,
@JsonProperty("tableName") String tableName)
{
this.connectorId = checkNotNull(connectorId, "connectorId is null");
this.schemaTableName = checkNotNull(schemaTableName, "schemaTableName is null");
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = checkNotNull(tableName, "tableName is null");
}
@JsonProperty
public String getConnectorId()
{
return connectorId;
}
@JsonProperty
public SchemaTableName getSchemaTableName()
{
return schemaTableName;
}
@JsonProperty
@Nullable
public String getCatalogName()
{
return catalogName;
}
@JsonProperty
@Nullable
public String getSchemaName()
{
return schemaName;
}
@JsonProperty
public String getTableName()
{
return tableName;
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if ((obj == null) || (getClass() != obj.getClass())) {
return false;
}
JdbcTableHandle o = (JdbcTableHandle) obj;
return Objects.equals(this.connectorId, o.connectorId) &&
Objects.equals(this.schemaTableName, o.schemaTableName);
}
@Override
public int hashCode()
{
return Objects.hash(connectorId, schemaTableName);
}
@Override
public String toString()
{
return Joiner.on(":").useForNull("null").join(connectorId, schemaTableName, catalogName, schemaName, tableName);
}
}

@ -0,0 +1,199 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.Domain;
import com.facebook.presto.spi.Range;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.BooleanType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarcharType;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Iterables.transform;
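/**
* Builds the SELECT statement pushed down to the remote database: identifiers are
* quoted with the driver-specific quote string and the TupleDomain is translated
* into WHERE conjuncts. Illustrative output (table and column names are made up,
* assuming a double-quote identifier quote):
*
*   SELECT "orderkey", "totalprice" FROM "catalog"."schema"."orders"
*   WHERE (("orderkey" >= 100 AND "orderkey" < 200))
*/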
public class QueryBuilder
{
private final String quote;
public QueryBuilder(String quote)
{
this.quote = checkNotNull(quote, "quote is null");
}
public String buildSql(int dbtype, String catalog, String schema, String table, List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> tupleDomain)
{
StringBuilder sql = new StringBuilder();
sql.append("SELECT ");
Joiner.on(", ").appendTo(sql, transform(columns, column -> quote(column.getColumnName())));
if (columns.isEmpty()) {
sql.append("null");
}
sql.append(" FROM ");
if (!isNullOrEmpty(catalog)) {
sql.append(quote(catalog)).append('.');
}
if (!isNullOrEmpty(schema)) {
sql.append(quote(schema)).append('.');
}
sql.append(quote(table));
if (dbtype == BaseJdbcClient.TYPE_SQLSERVER) {
sql.append(" WITH(NOLOCK) ");
}
List<String> clauses = toConjuncts(columns, tupleDomain);
if (!clauses.isEmpty()) {
sql.append(" WHERE ")
.append(Joiner.on(" AND ").join(clauses));
}
return sql.toString();
}
private List<String> toConjuncts(List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> tupleDomain)
{
ImmutableList.Builder<String> builder = ImmutableList.builder();
for (JdbcColumnHandle column : columns) {
Type type = column.getColumnType();
if (type.equals(BigintType.BIGINT) || type.equals(DoubleType.DOUBLE) || type.equals(BooleanType.BOOLEAN)
|| type.equals(VarcharType.VARCHAR) || type.equals(DateType.DATE) || type.equals(TimestampType.TIMESTAMP)) {
Domain domain = tupleDomain.getDomains().get(column);
if (domain != null) {
builder.add(toPredicate(column.getColumnName(), domain, type));
}
}
}
return builder.build();
}
private String toPredicate(String columnName, Domain domain, Type columnType)
{
if (domain.getRanges().isNone() && domain.isNullAllowed()) {
return quote(columnName) + " IS NULL";
}
if (domain.getRanges().isAll() && !domain.isNullAllowed()) {
return quote(columnName) + " IS NOT NULL";
}
// Add disjuncts for ranges
List<String> disjuncts = new ArrayList<>();
List<Object> singleValues = new ArrayList<>();
for (Range range : domain.getRanges()) {
checkState(!range.isAll()); // Already checked
if (range.isSingleValue()) {
singleValues.add(range.getLow().getValue());
}
else {
List<String> rangeConjuncts = new ArrayList<>();
if (!range.getLow().isLowerUnbounded()) {
switch (range.getLow().getBound()) {
case ABOVE:
rangeConjuncts.add(toPredicate(columnName, ">", range.getLow().getValue(), columnType));
break;
case EXACTLY:
rangeConjuncts.add(toPredicate(columnName, ">=", range.getLow().getValue(), columnType));
break;
case BELOW:
throw new IllegalArgumentException("Low Marker should never use BELOW bound: " + range);
default:
throw new AssertionError("Unhandled bound: " + range.getLow().getBound());
}
}
if (!range.getHigh().isUpperUnbounded()) {
switch (range.getHigh().getBound()) {
case ABOVE:
throw new IllegalArgumentException("High Marker should never use ABOVE bound: " + range);
case EXACTLY:
rangeConjuncts.add(toPredicate(columnName, "<=", range.getHigh().getValue(), columnType));
break;
case BELOW:
rangeConjuncts.add(toPredicate(columnName, "<", range.getHigh().getValue(), columnType));
break;
default:
throw new AssertionError("Unhandled bound: " + range.getHigh().getBound());
}
}
// If rangeConjuncts is empty, then the range was ALL, which should already have been checked for
checkState(!rangeConjuncts.isEmpty());
disjuncts.add("(" + Joiner.on(" AND ").join(rangeConjuncts) + ")");
}
}
// Add back all of the possible single values either as an equality or an IN predicate
if (singleValues.size() == 1) {
disjuncts.add(toPredicate(columnName, "=", getOnlyElement(singleValues), columnType));
}
else if (singleValues.size() > 1) {
ImmutableList.Builder<String> inListBuilder = ImmutableList.builder();
singleValues.stream().forEach(value -> inListBuilder.add(encode(value, columnType)));
disjuncts.add(quote(columnName) + " IN (" + Joiner.on(",").join(inListBuilder.build()) + ")");
}
// Add nullability disjuncts
checkState(!disjuncts.isEmpty());
if (domain.isNullAllowed()) {
disjuncts.add(quote(columnName) + " IS NULL");
}
return "(" + Joiner.on(" OR ").join(disjuncts) + ")";
}
private String toPredicate(String columnName, String operator, Object value, Type columnType)
{
return quote(columnName) + " " + operator + " " + encode(value, columnType);
}
private String quote(String name)
{
name = name.replace(quote, quote + quote);
return quote + name + quote;
}
private static String encode(Object value, Type columnType)
{
if (value instanceof Number || value instanceof Boolean) {
if (columnType.equals(DateType.DATE)) {
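// DATE values arrive as a day count since 1970-01-01; convert to milliseconds before formatting.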
return "'" + new SimpleDateFormat("yyyy-MM-dd").format(new Date(86400000 * Long.parseLong(value.toString(), 10))) + "'";
}
else if (columnType.equals(TimestampType.TIMESTAMP)) {
return "'" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(Long.parseLong(value.toString(), 10))) + "'";
}
return value.toString();
}
else if (value instanceof Slice) {
return "'" + ((Slice) value).toStringUtf8() + "'";
}
throw new UnsupportedOperationException("Can't handle type: " + value.getClass().getName());
}
}

@ -0,0 +1,33 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
public final class Types
{
private Types() {}
public static <A, B extends A> B checkType(A value, Class<B> target, String name)
{
checkNotNull(value, "%s is null", name);
checkArgument(target.isInstance(value),
"%s must be of type %s, not %s",
name,
target.getName(),
value.getClass().getName());
return target.cast(value);
}
}

@ -0,0 +1,89 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc.cache;
import io.airlift.configuration.Config;
import io.airlift.units.Duration;
import java.util.concurrent.TimeUnit;
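/**
* Configuration for the optional JDBC result cache. Illustrative catalog
* properties (table and column names are made up; the JSON shapes match
* what JdbcResultCache expects):
*
*   jdbc-cache-enable=true
*   jdbc-cache-table-config=[{"orders":["orderkey","totalprice"]}]
*   jdbc-cache-table-clause={"orders":"orderstatus = 'F'"}
*   jdbc-cache-refresh-interval=5m
*   jdbc-cache-expire-interval=10m
*/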
public class JdbcCacheConfig
{
public static final String DEFAULT_VALUE = "NA";
private String cacheTableConfig = DEFAULT_VALUE;
private String cacheTableClause;
private Duration cacheRefreshInterval = new Duration(5, TimeUnit.MINUTES);
private Duration cacheExpireInterval = new Duration(5, TimeUnit.MINUTES);
private boolean jdbcCacheEnable = false;
public String getCacheTableConfig()
{
return cacheTableConfig;
}
@Config("jdbc-cache-table-config")
public JdbcCacheConfig setCacheTableConfig(String cacheTableConfig)
{
this.cacheTableConfig = cacheTableConfig;
return this;
}
public String getCacheTableClause()
{
return cacheTableClause;
}
@Config("jdbc-cache-table-clause")
public JdbcCacheConfig setCacheTableClause(String cacheTableClause)
{
this.cacheTableClause = cacheTableClause;
return this;
}
public Duration getCacheRefreshInterval()
{
return cacheRefreshInterval;
}
@Config("jdbc-cache-refresh-interval")
public JdbcCacheConfig setCacheRefreshInterval(Duration cacheRefreshInterval)
{
this.cacheRefreshInterval = cacheRefreshInterval;
return this;
}
public Duration getCacheExpireInterval()
{
return cacheExpireInterval;
}
@Config("jdbc-cache-expire-interval")
public JdbcCacheConfig setCacheExpireInterval(Duration cacheExpireInterval)
{
this.cacheExpireInterval = cacheExpireInterval;
return this;
}
public boolean getJdbcCacheEnable()
{
return jdbcCacheEnable;
}
@Config("jdbc-cache-enable")
public JdbcCacheConfig setJdbcCacheEnable(boolean jdbcCacheEnable)
{
this.jdbcCacheEnable = jdbcCacheEnable;
return this;
}
}

@ -0,0 +1,98 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc.cache;
import java.util.Objects;
import static com.google.common.base.Preconditions.checkNotNull;
public class JdbcCacheSplit
{
private final String connectorId;
private final String catalogName;
private final String schemaName;
private final String tableName;
private final String baseTableName;
private final String connectionUrl;
public JdbcCacheSplit(String connectorId, String catalogName,
String schemaName, String tableName, String connectionUrl, String baseTableName)
{
this.connectorId = checkNotNull(connectorId, "connector id is null");
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = checkNotNull(tableName, "table name is null");
this.connectionUrl = checkNotNull(connectionUrl, "connectionUrl is null");
this.baseTableName = checkNotNull(baseTableName, "baseTableName is null");
}
public String getConnectorId()
{
return connectorId;
}
public String getCatalogName()
{
return catalogName == null ? "null" : catalogName;
}
public String getSchemaName()
{
return schemaName == null ? "null" : schemaName;
}
public String getTableName()
{
return tableName;
}
public String getBaseTableName()
{
return baseTableName;
}
public String getConnectionUrl()
{
return connectionUrl;
}
@Override
public int hashCode()
{
return Objects.hash(getConnectorId(), getConnectionUrl(), getCatalogName(), getSchemaName(), getTableName());
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
// Hash-code comparison is not a valid equality check and fails for null or foreign types.
if (!(obj instanceof JdbcCacheSplit)) {
return false;
}
JdbcCacheSplit other = (JdbcCacheSplit) obj;
return this.getConnectorId().equals(other.getConnectorId())
&& this.getConnectionUrl().equals(other.getConnectionUrl())
&& this.getCatalogName().equals(other.getCatalogName())
&& this.getSchemaName().equals(other.getSchemaName())
&& this.getTableName().equals(other.getTableName());
}
@Override
public String toString()
{
return getConnectorId() + ","
+ getConnectionUrl() + ","
+ getCatalogName() + ","
+ getSchemaName() + ","
+ getTableName();
}
}

@ -0,0 +1,42 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc.cache;
import java.util.List;
public class JdbcJavaBean
{
private List<String> columns;
private Object[] values;
public JdbcJavaBean(List<String> columns)
{
this.columns = columns;
this.values = new Object[columns.size()];
}
public Object getFieldObjectValue(int index)
{
return values[index];
}
public void setFieldObjectValue(int index, Object value)
{
values[index] = value;
}
public List<String> getColumns()
{
return columns;
}
}

@ -0,0 +1,227 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc.cache;
import io.airlift.log.Logger;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.Statement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import com.facebook.presto.plugin.jdbc.util.JdbcUtil;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.BooleanType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.TimeType;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarbinaryType;
import com.facebook.presto.spi.type.VarcharType;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.util.Locale.ENGLISH;
public class JdbcResultCache
{
private final LoadingCache<JdbcCacheSplit, List<JdbcJavaBean>> jdbcResultCache;
private final String identifierQuote;
private final Driver driver;
private final Properties connectionProperties;
private static final Logger log = Logger.get(JdbcResultCache.class);
private List<String> tableList = new ArrayList<String>();
private HashMap<String, List<String>> fieldList = new HashMap<String, List<String>>();
private LinkedHashMap<String, String> cacheTableClauseMap;
public JdbcResultCache(String identifierQuote,
Driver driver,
Properties connectionProperties,
JdbcCacheConfig cacheConfig)
{
this.identifierQuote = identifierQuote;
this.driver = driver;
this.connectionProperties = connectionProperties;
long expiresAfterWrite = checkNotNull(cacheConfig.getCacheExpireInterval(), "cacheExpireInterval is null").toMillis();
long refreshAfterWrite = checkNotNull(cacheConfig.getCacheRefreshInterval(), "cacheRefreshInterval is null").toMillis();
analyseCacheTableAndField(cacheConfig.getCacheTableConfig(), cacheConfig.getCacheTableClause());
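// LoadingCache keyed by split: an entry is reloaded on access after the refresh interval and evicted after the expire interval.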
jdbcResultCache = CacheBuilder
.newBuilder()
.expireAfterWrite(expiresAfterWrite, TimeUnit.MILLISECONDS)
.refreshAfterWrite(refreshAfterWrite, TimeUnit.MILLISECONDS)
.build(new CacheLoader<JdbcCacheSplit, List<JdbcJavaBean>>(){
@Override
public List<JdbcJavaBean> load(JdbcCacheSplit key) throws Exception
{
return loadTableDataSet(key);
}
});
}
private List<JdbcJavaBean> loadTableDataSet(JdbcCacheSplit key)
{
log.debug("loadTableDataSet key : " + key);
List<JdbcJavaBean> list = new ArrayList<JdbcJavaBean>();
try {
Connection connection = getConnection(key.getConnectionUrl());
HashMap<String, Type> types = getColumnTypes(key);
String tableName = key.getBaseTableName();
List<String> columns = fieldList.get(tableName);
String columnPart = Joiner.on(",").join(columns);
String sql = "SELECT " + columnPart + " FROM " +
JdbcUtil.getTableName(identifierQuote, key.getCatalogName(), key.getSchemaName(), key.getTableName());
if (cacheTableClauseMap != null && !isNullOrEmpty(cacheTableClauseMap.get(tableName))) {
sql += " WHERE " + cacheTableClauseMap.get(tableName);
}
Statement statement = connection.createStatement();
statement.setFetchSize(10_000);
long startTime = System.currentTimeMillis();
ResultSet resultSet = statement.executeQuery(sql);
log.debug("The connection url: %s ,ExecuteQuery: %s ,spend time : %s , thread id : %s", key.getConnectionUrl(), sql, (System.currentTimeMillis() - startTime), Thread.currentThread().getId());
while (resultSet.next()) {
JdbcJavaBean tableDataSet = new JdbcJavaBean(columns);
for (int i = 1; i <= columns.size(); i++) {
Type type = types.get(columns.get(i - 1));
if (type.equals(BooleanType.BOOLEAN)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getBoolean(i));
}
else if (type.equals(BigintType.BIGINT)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getLong(i));
}
else if (type.equals(DateType.DATE)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getDate(i));
}
else if (type.equals(TimeType.TIME)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getTime(i));
}
else if (type.equals(TimestampType.TIMESTAMP)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getTimestamp(i));
}
else if (type.equals(DoubleType.DOUBLE)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getDouble(i));
}
else if (type.equals(VarcharType.VARCHAR)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getString(i));
}
else if (type.equals(VarbinaryType.VARBINARY)) {
tableDataSet.setFieldObjectValue((i - 1), resultSet.getBytes(i));
}
}
list.add(tableDataSet);
}
log.debug("The connection url: %s ,parse result: %s ,spend time : %s , thread id : %s", key.getConnectionUrl(), sql, (System.currentTimeMillis() - startTime), Thread.currentThread().getId());
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
return list;
}
public List<JdbcJavaBean> getResult(JdbcCacheSplit key)
{
try {
return jdbcResultCache.get(key);
}
catch (ExecutionException e) {
throw Throwables.propagate(e);
}
}
public HashMap<String, Type> getColumnTypes(JdbcCacheSplit key)
{
HashMap<String, Type> types = new HashMap<String, Type>();
try (Connection connection = getConnection(key.getConnectionUrl())) {
DatabaseMetaData metadata = connection.getMetaData();
try (ResultSet resultSet = metadata.getColumns(key.getSchemaName(), key.getCatalogName(), key.getTableName(), null)) {
while (resultSet.next()) {
Type columnType = JdbcUtil.toPrestoType(resultSet.getInt("DATA_TYPE"));
if (columnType != null) {
String columnName = resultSet.getString("COLUMN_NAME").toLowerCase(ENGLISH);
types.put(columnName, columnType);
}
}
}
}
catch (SQLException e) {
throw Throwables.propagate(e);
}
return types;
}
public Connection getConnection(String connectionURL)
throws SQLException
{
Connection connection = driver.connect(connectionURL, connectionProperties);
try {
connection.setReadOnly(true);
}
catch (SQLException e) {
connection.close();
throw e;
}
return connection;
}
private void analyseCacheTableAndField(String cacheTableConfig, String cacheTableClause)
{
ObjectMapper objectMapper = new ObjectMapper();
try {
// table name and column field
List<LinkedHashMap<String, Object>> readValue = objectMapper.readValue(cacheTableConfig.toLowerCase(ENGLISH), ArrayList.class);
for (LinkedHashMap<String, Object> map : readValue) {
for (String t : map.keySet()) {
tableList.add(t);
ArrayList<String> object = (ArrayList<String>) map.get(t);
fieldList.put(t, object);
}
}
if (!isNullOrEmpty(cacheTableClause)) {
// table where condition
cacheTableClauseMap = (LinkedHashMap<String, String>) objectMapper.readValue(cacheTableClause, Map.class);
}
}
catch (JsonParseException e) {
throw Throwables.propagate(e);
}
catch (JsonMappingException e) {
throw Throwables.propagate(e);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
}
public boolean isCacheTable(String tableName)
{
return tableList.contains(tableName);
}
}

@ -0,0 +1,255 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc.subtable;
import static java.util.Locale.ENGLISH;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.dbutils.DbUtils;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;
import com.facebook.presto.plugin.jdbc.JdbcSplit;
import com.facebook.presto.plugin.jdbc.subtable.PdboTableInfo.DBType;
import com.facebook.presto.plugin.jdbc.util.JdbcUtil;
import com.facebook.presto.plugin.jdbc.util.PdboMetadata;
import com.facebook.presto.server.PrestoServer;
import com.mysql.jdbc.Driver;
public class JdbcLoadTread implements Runnable
{
private static final Logger log = Logger.get(JdbcLoadTread.class);
protected final String connectionUrl;
protected final Properties connectionProperties;
protected final String connectorId;
protected final Duration jdbcReloadSubtableInterval;
private long lastLoadSubTableTimeStamp = 0L;
protected final Driver driver;
private final boolean jdbcSubTableAllocator;
private final ConcurrentMap<PdboTableInfo, ArrayList<PdboSplit>> pdboTables = new ConcurrentHashMap<>();
public JdbcLoadTread(String connectionUrl,
Properties connectionProperties,
String connectorId,
Duration jdbcReloadSubtableInterval) throws SQLException
{
this.connectionUrl = connectionUrl;
this.connectionProperties = connectionProperties;
this.connectorId = connectorId;
this.jdbcReloadSubtableInterval = jdbcReloadSubtableInterval;
this.driver = new Driver();
this.jdbcSubTableAllocator = PrestoServer.isCoordinator();
}
public void run()
{
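// Only the coordinator allocates sub-tables: load PDBO table metadata immediately, then sleep for jdbcReloadSubtableInterval between reloads.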
while (jdbcSubTableAllocator) {
try {
if (lastLoadSubTableTimeStamp == 0) {
loadPdboTableInfo();
}
else {
Thread.sleep(jdbcReloadSubtableInterval.toMillis());
long curTime = System.currentTimeMillis();
loadPdboTableInfo();
log.debug(connectorId + " loaded sub-table info in " + (System.currentTimeMillis() - curTime) + " ms");
}
lastLoadSubTableTimeStamp = System.currentTimeMillis();
}
catch (Exception e) {
lastLoadSubTableTimeStamp = System.currentTimeMillis();
log.error(e, connectorId + " Error reloading sub-table information: %s", e.getMessage());
}
}
}
public synchronized void loadPdboTableInfo()
{
String sql = PdboMetadata.getPdboTableInfoSQL();
Connection conn = getConnection();
QueryRunner runner = new QueryRunner();
try {
runner.query(conn, sql, new PdboTableResultHandle(), connectorId);
}
catch (SQLException e) {
log.error(e, "loadPdboTableInfo Execute %s error : %s" , sql, e.getMessage());
}
finally {
DbUtils.closeQuietly(conn);
}
}
public List<PdboSplit> getPDBOLogs(String connectorId, String schemaName, String tableName)
{
String sql = PdboMetadata.getPdboLogsSQL();
Connection conn = getConnection();
QueryRunner runner = new QueryRunner();
List<PdboSplit> pdboSplits = null;
try {
pdboSplits = runner.query(conn, sql, new PdboLogsResultHandle(), connectorId, schemaName, tableName);
}
catch (SQLException e) {
log.error(e, "getPDBOLogs Execute %s error : %s" , sql, e.getMessage());
}
finally {
DbUtils.closeQuietly(conn);
}
return pdboSplits;
}
private class PdboLogsResultHandle implements ResultSetHandler<List<PdboSplit>>
{
@Override
public List<PdboSplit> handle(ResultSet rs) throws SQLException
{
List<PdboSplit> tables = new ArrayList<>();
int scannodenumber = 0;
//A.CONNECTORID,A.SCHEMANAME,A.TABLENAME,A.ROWS,A.BEGININDEX,A.ENDINDEX,B.DBTYPE,
//C.DBHOST,C.DBPORT,C.CONNECTION_PROPERTIES,C.PRESTO_WORK_HOST,C.REMOTELYACCESSIBLE,C.SPLITFIELD,
//C.SCANNODENUMBER,D.USERNAME,D.PASSWORD,A.CONTROL_SCAN_CONCURRENCY_ENABLED,A.SCAN_CONCURRENCY_COUNT
while (rs.next()) {
scannodenumber = rs.getInt(14);
tables.add(new PdboSplit().setConnectorId(rs.getString(1)).
setSchemaName(rs.getString(2)).
setTableName(rs.getString(3)).
setRows(rs.getLong(4)).
setBeginIndex(rs.getLong(5)).
setEndIndex(rs.getLong(6)).
setDbHost(rs.getString(8)).
setConnectionUrl(getConnectionURL(rs.getString(7), rs.getString(8), rs.getString(9), rs.getString(10))).
setPrestoWorkHost(rs.getString(11)).
setRemotelyAccessible(rs.getString(12)).
setSplitField(rs.getString(13)).
setScanNodes(rs.getInt(14)).
setUsername(JdbcUtil.Base64Decode(rs.getString(15))).
setPassword(JdbcUtil.Base64Decode(rs.getString(16))).
setCalcStepEnable("Y").
setControlScanConcurrencyEnabled(rs.getString(17)).
setScanConcurrencyCount(rs.getInt(18)));
}
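// If the number of log rows does not match the expected scan node count, discard the partial result and reload the table metadata.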
if (scannodenumber != tables.size()) {
tables.clear();
loadPdboTableInfo();
}
return tables;
}
}
private class PdboTableResultHandle implements ResultSetHandler<String>
{
@Override
public String handle(ResultSet rs) throws SQLException
{
pdboTables.clear();
//CONNECTORID,PRESTO_SCHEMA,PRESTO_TABLE,DBTYPE,PDBOENABLE,CONTROL_SCAN_CONCURRENCY_ENABLED,SCAN_CONCURRENCY_COUNT
//DBHOST,DBPORT,CONNECTION_PROPERTIES,SOURCE_SCHEMA,SOURCE_TABLE,SPLITFIELD,REMOTELYACCESSIBLE,PRESTO_WORK_HOST,SCANNODENUMBER,
//FIELDMAXVALUE,FIELDMINVALUE,USERNAME,PASSWORD,"
while (rs.next()) {
PdboTableInfo table = new PdboTableInfo(rs.getString(1).toLowerCase(ENGLISH),
rs.getString(2).toLowerCase(ENGLISH), rs.getString(3).toLowerCase(ENGLISH));
table.setDbType(rs.getString(4));
table.setCalcStepEnable(rs.getString(5));
table.setControlScanConcurrencyEnabled(rs.getString(6));
table.setScanConcurrencyCount(rs.getInt(7));
String connectionUrl = getConnectionURL(table.getDbType(), rs.getString(8), rs.getString(9), rs.getString(10));
PdboSplit pdboSplit = new PdboSplit().setSchemaName(rs.getString(11).toLowerCase(ENGLISH)).
setTableName(rs.getString(12).toLowerCase(ENGLISH)).
setDbHost(rs.getString(8)).
setConnectionUrl(connectionUrl).
setSplitField(rs.getString(13)).
setRemotelyAccessible(rs.getString(14)).
setPrestoWorkHost(rs.getString(15)).
setScanNodes(rs.getInt(16)).
setFieldMaxValue(rs.getLong(17)).
setFieldMinValue(rs.getLong(18)).
setUsername(JdbcUtil.Base64Decode(rs.getString(19))).
setPassword(JdbcUtil.Base64Decode(rs.getString(20))).
setCalcStepEnable(rs.getString(5)).
setControlScanConcurrencyEnabled(rs.getString(6)).
setScanConcurrencyCount(rs.getInt(7));
ArrayList<PdboSplit> routeList = pdboTables.get(table);
if (routeList == null) {
routeList = new ArrayList<>();
}
routeList.add(pdboSplit);
pdboTables.put(table, routeList);
}
return null;
}
}
private String getConnectionURL(String dbType, String dbHost, String dbPort, String connectionProperties)
throws SQLException
{
String connectionUrl = "";
if (dbType.equals(DBType.MYSQL.toString())) {
connectionUrl = "jdbc:mysql://";
}
else if (dbType.equals(DBType.SQLSERVER.toString())) {
connectionUrl = "jdbc:jtds:sqlserver://";
}
else if (dbType.equals(DBType.ORACLE.toString())) {
connectionUrl = "jdbc:oracle:thin:@";
}
connectionUrl += dbHost + ":" + dbPort + connectionProperties;
return connectionUrl;
}
public ConcurrentMap<PdboTableInfo, ArrayList<PdboSplit>> getPdboTableInfo()
{
return pdboTables;
}
public void commitPdboLogs(JdbcSplit split, long rowCount)
{
Connection conn = getConnection();
QueryRunner runner = new QueryRunner();
String insertSql = PdboMetadata.getInsertPdboLogSQL(split, rowCount, connectorId);
String updateSql = PdboMetadata.getUpdatePdboHistoryLogSQL(split, connectorId);
try {
runner.update(conn, updateSql);
runner.update(conn, insertSql);
}
catch (SQLException e) {
log.error(e, "insert sql : %s,update sql : %s commitPdboLogs error : %s", insertSql, updateSql, e.getMessage());
}
finally {
DbUtils.closeQuietly(conn);
}
}
public Connection getConnection()
{
Connection conn = null;
try {
conn = driver.connect(connectionUrl, connectionProperties);
}
catch (SQLException e) {
log.error("Connect pdbo db error : " + e.getMessage());
}
return conn;
}
}

Some files were not shown because too many files have changed in this diff.