Flink TableEnvironment.create throws NoSuchMethodError - apache-flink

I am testing the Flink Hive connector, following the instructions here: https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/connectors/table/hive/overview/.
The final code is as follows. I tried to run it in IntelliJ IDEA. Unfortunately, it doesn't work: TableEnvironment.create throws a NoSuchMethodError.
public static void main(String[] args) throws Exception {
EnvironmentSettings settings = EnvironmentSettings.inStreamingMode();
TableEnvironment tableEnv = TableEnvironment.create(settings); // throws NoSuchMethodError
String name = "myhive";
String defaultDatabase = "default";
String hiveConfDir = "/Users/gaoxiahong/apache-hive-3.1.2-bin/conf";
HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
tableEnv.registerCatalog(name, hive);
tableEnv.useCatalog(name);
System.out.println(tableEnv.executeSql("show tables"));
}
The exception message is as follows:
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.calcite.sql.parser.SqlParser.config()Lorg/apache/calcite/sql/parser/SqlParser$Config;
at org.apache.flink.table.planner.delegation.PlannerContext.lambda$getSqlParserConfig$1(PlannerContext.java:263)
at java.util.Optional.orElseGet(Optional.java:267)
at org.apache.flink.table.planner.delegation.PlannerContext.getSqlParserConfig(PlannerContext.java:257)
at org.apache.flink.table.planner.delegation.PlannerContext.createFrameworkConfig(PlannerContext.java:148)
at org.apache.flink.table.planner.delegation.PlannerContext.<init>(PlannerContext.java:130)
at org.apache.flink.table.planner.delegation.PlannerBase.<init>(PlannerBase.scala:116)
at org.apache.flink.table.planner.delegation.StreamPlanner.<init>(StreamPlanner.scala:62)
at org.apache.flink.table.planner.delegation.DefaultPlannerFactory.create(DefaultPlannerFactory.java:64)
at org.apache.flink.table.factories.PlannerFactoryUtil.createPlanner(PlannerFactoryUtil.java:52)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.create(TableEnvironmentImpl.java:302)
at org.apache.flink.table.api.TableEnvironment.create(TableEnvironment.java:93)
at com.yqg.flinkhive.Test.main(Test.java:18)
My Flink version is 1.15.2 and my Hive version is 3.1.2. The pom.xml file looks like this:
<properties>
<flink.version>1.15.2</flink.version>
<hive.version>3.1.2</hive.version>
<scala.version>2.12</scala.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-hive_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java-bridge</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
Can anyone help me figure out the issue here? Thanks in advance~

Per https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/dev/configuration/overview/ you most likely need to swap flink-table-api-java-bridge for flink-table-api-scala-bridge_2.12.
See also https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/connectors/table/hive/overview/#program-maven
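A minimal sketch of that swap in the pom, in the same style as the dependencies above (whether the Java bridge can simply be dropped depends on which APIs the rest of your code uses):
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-scala-bridge_${scala.version}</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>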

Related

An error occurs when I query the Oracle database using Flink SQL CDC

My environment is as follows:
linux: centos 7 (A Virtual machine on VMware Workstation Pro)
oracle: 11.2.0.4 (archivelog mode already started)
flink: 1.13.6
flink cdc connector: 2.2.0
java: 1.8
I tried to query data from an Oracle database using Flink SQL CDC in IDEA; however, I encountered some errors. The errors are as follows:
My Java code is as follows:
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
public class FlinkSQL_CDC_JDBC_Oracle {
public static StreamTableEnvironment getTableEnvironment(){
EnvironmentSettings settings = EnvironmentSettings
.newInstance()
.useBlinkPlanner()
.inStreamingMode()
.build();
StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(sEnv,settings);
return tableEnvironment;
}
public static String buildSourceTable() {
String sql = "CREATE TABLE `a` (\n" +
" ID BIGINT,\n" +
" NAME VARCHAR,\n" +
" PRIMARY KEY(NAME) NOT ENFORCED )\n" +
" WITH (\n" +
" 'connector' = 'oracle-cdc',\n" +
// Change this to the actual IP address of the Oracle host
" 'hostname' = 'hadoop104',\n" +
" 'port' = '1521',\n" +
" 'username' = 'oracle',\n" +
" 'password' = 'oracle',\n" +
" 'database-name' = 'ORCL',\n" +
" 'schema-name' = 'oracle',\n" +
" 'table-name' = 'A',\n" +
//"'scan.startup.mode'='latest-offset'," +
"'debezium.log.mining.strategy'='online_catalog'," +
"'debezium.database.tablename.case.insensitive' = 'false'," +
"'debezium.log.mining.continuous.mine'='true'" +
")";
return sql;
}
public static void test(){
StreamTableEnvironment tableEnvironment = getTableEnvironment();
tableEnvironment.executeSql(buildSourceTable());
tableEnvironment.executeSql("select * from a").print();
}
public static void main(String[] args) {
test();
}
}
My pom file is:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.example</groupId>
<artifactId>Flink_Oracle</artifactId>
<version>1.0</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<flink.version>1.13.6</flink.version>
<scala.binary.version>2.11</scala.binary.version>
<!-- <mysql.version>5.7.16</mysql.version>-->
<gson.version>2.8.6</gson.version>
</properties>
<dependencies>
<!-- <dependency>-->
<!-- <groupId>org.apache.flink</groupId>-->
<!-- <artifactId>flink-shaded-guava</artifactId>-->
<!-- <version>18.0-7.0</version>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.ververica</groupId>-->
<!-- <artifactId>flink-cdc-base</artifactId>-->
<!-- <version>2.3.0</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-oracle-cdc</artifactId>
<version>2.2.0</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.flink</groupId>-->
<!-- <artifactId>flink-jdbc_${scala.binary.version}</artifactId>-->
<!-- <version>${flink.version}</version>-->
<!-- <scope>system</scope>-->
<!-- <systemPath>${project.basedir}/src/main/resources/libs/flink-connector-jdbc_2.11-1.13.6.jar</systemPath>-->
<!-- </dependency>-->
<!-- <dependency>-->
<!-- <groupId>com.oracle.database.jdbc</groupId>-->
<!-- <artifactId>ojdbc6</artifactId>-->
<!-- <version>11.2.0.4</version>-->
<!-- </dependency>-->
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-core -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-common</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${gson.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.0.0</version>
<configuration>
<createDependencyReducedPom>false</createDependencyReducedPom>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<!-- When packaging, replace this with the corresponding main class -->
<mainClass>com.flink.cdc.demo.MysqlCdcMysql</mainClass>
</transformer>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
</transformers>
<filters>
<filter>
<artifact>*:*:*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
I searched the Internet for a solution to error code ORA-00604; the answer was that the SYSTEM tablespace was insufficient. However, the problem persisted after I enlarged the SYSTEM tablespace.
I searched the Internet for a solution to error code ORA-12705; the answer was that the NLS_LANG variable was configured incorrectly. However, my NLS_LANG variable is configured correctly: NLS_LANG=AMERICAN_AMERICA.AL32UTF8, and the NLS_LANG value in the registry is the same as that on the Oracle database. Moreover, everything is fine when I use Flink SQL JDBC to query the Oracle database.
Finally, I really don't know what to do to solve the problem.
I hope someone can help me, if possible through remote desktop; I am willing to pay a reward in return.

Does org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer implement SinkFunction<T>?

I am trying to implement a simple Flink job that uses org.apache.flink.streaming.connectors, takes a Kafka topic as the input source, and outputs to a Kafka sink. I am following this guide https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/connectors/datastream/kafka/ and wrote code like this:
FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(TOPIC_IN, new SimpleStringSchema(), props); //FlinkKafkaConsumer<String> testKafkaConsumer = new FlinkKafkaConsumer<>(TOPIC_TEST, new SimpleStringSchema(), props);
kafkaConsumer.setStartFromEarliest();
DataStream<String> dataStream = env.addSource(kafkaConsumer);
StringSchema stringSchema = new StringSchema(TOPIC_OUT);
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(TOPIC_OUT, stringSchema, props, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
//addSink((SinkFunction<String>) kafkaProducer);
dataStream.addSink(kafkaProducer);
However, addSink needs a SinkFunction while I provide a FlinkKafkaProducer, which extends TwoPhaseCommitSinkFunction. I am confused about why it complains and doesn't work.
My pom.xml file is as follows
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.11</artifactId>
<version>1.13.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-base</artifactId>
<version>1.13.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-streaming-java -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.13.2</version>
<scope>provided</scope>
</dependency>
It seems this class has been deprecated: https://ci.apache.org/projects/flink/flink-docs-master/api/java/org/apache/flink/streaming/connectors/kafka/package-summary.html.
There is no FlinkKafkaProducer constructor with the method signature you're using. You could use this one:
public FlinkKafkaProducer(
String topicId,
SerializationSchema<IN> serializationSchema,
Properties producerConfig,
@Nullable FlinkKafkaPartitioner<IN> customPartitioner,
FlinkKafkaProducer.Semantic semantic,
int kafkaProducersPoolSize)
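A minimal sketch of calling that six-argument constructor with the names from your snippet (TOPIC_OUT, props, dataStream); the null partitioner, the use of SimpleStringSchema in place of your StringSchema, and FlinkKafkaProducer.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE are my assumptions for sensible defaults:
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(
        TOPIC_OUT,
        new SimpleStringSchema(),   // a SerializationSchema<String>; assumed in place of your StringSchema
        props,
        null,                       // no custom partitioner
        FlinkKafkaProducer.Semantic.EXACTLY_ONCE,
        FlinkKafkaProducer.DEFAULT_KAFKA_PRODUCERS_POOL_SIZE);
dataStream.addSink(kafkaProducer);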

Spring Boot starter for Apache Camel (AMQP) doesn't find ConnectionFactory bean

I created an application to read messages from Apache Qpid and send them to Apache Kafka. I am using Camel with the Spring Boot starter. My pom looks like this:
<dependencyManagement>
<dependencies>
<!-- Camel BOM -->
<dependency>
<groupId>org.apache.camel.springboot</groupId>
<artifactId>camel-spring-boot-dependencies</artifactId>
<version>${camel.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<!-- ... other BOMs or dependencies ... -->
</dependencies>
</dependencyManagement>
<dependencies>
<!-- starters -->
<dependency>
<groupId>org.apache.camel.springboot</groupId>
<artifactId>camel-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel.springboot</groupId>
<artifactId>camel-amqp-starter</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel.springboot</groupId>
<artifactId>camel-kafka-starter</artifactId>
</dependency>
<!-- other camel dependencies -->
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-spring</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-amqp</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>2.3.0.RELEASE</version>
</plugin>
</plugins>
</build>
and Spring application class is -
@SpringBootApplication
public class CamelSpringJmsKafkaApplication {
public static void main(String[] args) {
SpringApplication.run(CamelSpringJmsKafkaApplication.class, args);
}
@Bean
public JmsConnectionFactory jmsConnectionFactory(@Value("${qpidUser}") String qpidUser, @Value("${qpidPassword}") String qpidPassword, @Value("${qpidBrokerUrl}") String qpidBrokerUrl) {
JmsConnectionFactory jmsConnectionFactory = new JmsConnectionFactory(qpidPassword, qpidPassword, qpidBrokerUrl);
return jmsConnectionFactory;
}
@Bean
@Primary
public CachingConnectionFactory jmsCachingConnectionFactory(JmsConnectionFactory jmsConnectionFactory) {
CachingConnectionFactory cachingConnectionFactory = new CachingConnectionFactory(jmsConnectionFactory);
return cachingConnectionFactory;
}
}
application.properties is:
camel.springboot.main-run-controller = true
camel.component.amqp.enabled = true
camel.component.amqp.connection-factory = jmsCachingConnectionFactory
camel.component.amqp.async-consumer = true
camel.component.amqp.concurrent-consumers = 1
camel.component.amqp.map-jms-message = true
camel.component.amqp.test-connection-on-startup = true
camel.component.kafka.brokers = localhost:9092
qpidBrokerUrl = amqp://localhost:5672?jms.username=guest&jms.password=guest&jms.clientID=clientid2&amqp.vhost=default
qpidUser = guest
qpidPassword = guest
RouteBuilder is -
@Component
public class QpidToKafkaRoute extends RouteBuilder {
public void configure() throws Exception {
from("amqp:queue:test")
.log("Received key : ${header.JMSMessageID}, message : ${body}")
.setHeader(KafkaConstants.KEY, header("JMSMessageID"))
.to("kafka:camel")
.log("Sent key : ${headers[kafka.KEY]}, message : ${body}");
}
}
When I start this application, it throws following exception -
org.apache.camel.FailedToStartRouteException: Failed to start route route1 because of null
at org.apache.camel.impl.engine.RouteService.warmUp(RouteService.java:125) ~[camel-base-3.4.0.jar:3.4.0]
Caused by: java.lang.IllegalArgumentException: connectionFactory must be specified
at org.apache.camel.util.ObjectHelper.notNull(ObjectHelper.java:152) ~[camel-util-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsConfiguration.createConnectionFactory(JmsConfiguration.java:1629) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsConfiguration.getOrCreateConnectionFactory(JmsConfiguration.java:773) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsConfiguration.createListenerConnectionFactory(JmsConfiguration.java:1638) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsConfiguration.getOrCreateListenerConnectionFactory(JmsConfiguration.java:816) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsConfiguration.configureMessageListenerContainer(JmsConfiguration.java:1468) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsConfiguration.createMessageListenerContainer(JmsConfiguration.java:725) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsEndpoint.createMessageListenerContainer(JmsEndpoint.java:189) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsEndpoint.createConsumer(JmsEndpoint.java:184) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.component.jms.JmsEndpoint.createConsumer(JmsEndpoint.java:73) ~[camel-jms-3.4.0.jar:3.4.0]
at org.apache.camel.impl.engine.DefaultRoute.addServices(DefaultRoute.java:560) ~[camel-base-3.4.0.jar:3.4.0]
at org.apache.camel.impl.engine.DefaultRoute.onStartingServices(DefaultRoute.java:166) ~[camel-base-3.4.0.jar:3.4.0]
at org.apache.camel.impl.engine.RouteService.doWarmUp(RouteService.java:153) ~[camel-base-3.4.0.jar:3.4.0]
at org.apache.camel.impl.engine.RouteService.warmUp(RouteService.java:123) ~[camel-base-3.4.0.jar:3.4.0]
Could you please help me figure out why Spring Boot is not finding the connectionFactory during autoconfiguration? When I debug this code, I can see the connectionFactory bean being created. I can even see one more log line:
CamelContext has only been running for less than a second. If you intend to run Camel for a longer time then you can set the property camel.springboot.main-run-controller=true in application.properties or add spring-boot-starter-web JAR to the classpath.
However, as you can see in my application.properties file, the required property is present at the very beginning.
One more log line that I can see at the beginning of application startup:
[main] trationDelegate$BeanPostProcessorChecker : Bean 'org.apache.camel.spring.boot.CamelAutoConfiguration' of type [org.apache.camel.spring.boot.CamelAutoConfiguration] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
Note - One interesting fact: exactly the same code was running fine last night. I just restarted my desktop, and without a single word changed it is now throwing this exception. The code can also be seen here: https://github.com/prashantbhardwaj/qpid-to-kafka-using-camel
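Given the "connectionFactory must be specified" error, one thing worth checking (an assumption on my part, not something shown in the stack trace): when a Camel component option such as connection-factory should point at a bean in the registry, Camel's property binding usually expects a # prefix before the bean name, e.g.:
camel.component.amqp.connection-factory = #jmsCachingConnectionFactory
Without the prefix, the value may be treated as a plain string rather than a bean reference, leaving the JMS configuration without a connection factory.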

Apache CXF WebClient Doesn't Work As Expected in TomEE 8

I am trying to get the JWK key set from Google for use with the Apache CXF OIDC and JOSE libraries. The code works fine when I run it in a standalone main method.
public class Main {
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
final WebClient client = WebClient.create("https://www.googleapis.com/oauth2/v3/certs", Arrays.asList(new JsonWebKeysProvider()), true).accept(MediaType.APPLICATION_JSON);
JsonWebKeys keys = client.get(JsonWebKeys.class);
keys.getKeys().forEach(key -> {
System.out.println("****************************************************************************");
System.out.println("ID........." + key.getKeyId());
System.out.println("Alg........" + key.getAlgorithm());
System.out.println("Key Type..." + key.getKeyType());
System.out.println("Use........" + key.getPublicKeyUse());
});
}
}
The ID, algorithm, key type, and use are printed properly, meaning that the keys are properly populated.
Sample output:
****************************************************************************
ID.........79c809dd1186cc228c4baf9358599530ce92b4c8
Alg........RS256
Key Type...RSA
Use........sig
****************************************************************************
ID.........17d55ff4e10991d6b0efd392b91a33e54c0e218b
Alg........RS256
Key Type...RSA
Use........sig
pom.xml extract for the Main class:
<dependencies>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-rs-client</artifactId>
<version>3.3.5</version>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-rs-security-sso-oidc</artifactId>
<version>3.3.5</version>
</dependency>
</dependencies>
The same code, however, doesn't work when deployed in TomEE 8.
@WebServlet(name = "NewServlet", urlPatterns = {"/x"})
public class NewServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
PrintWriter writer = response.getWriter();
final WebClient client = WebClient.create("https://www.googleapis.com/oauth2/v3/certs", Arrays.asList(new JsonWebKeysProvider()), true).accept(MediaType.APPLICATION_JSON);
JsonWebKeys keys = client.get(JsonWebKeys.class);
keys.getKeys().forEach(key -> {
writer.println("****************************************************************************");
writer.println("ID........." + key.getKeyId());
writer.println("Alg........" + key.getAlgorithm());
writer.println("Key Type..." + key.getKeyType());
writer.println("Use........" + key.getPublicKeyUse());
});
}
}
The ID, algorithm, key type, and use are null when this code runs in TomEE 8. The CXF OIDC and JOSE jars are installed in the tomee/lib folder.
Sample output:
****************************************************************************
ID.........null
Alg........null
Key Type...null
Use........null
****************************************************************************
ID.........null
Alg........null
Key Type...null
Use........null
pom.xml extract for the servlet:
<dependencies>
<dependency>
<groupId>org.apache.tomee</groupId>
<artifactId>javaee-api</artifactId>
<version>8.0-3</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-frontend-jaxrs</artifactId>
<version>${cxf.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-rs-security-sso-oidc</artifactId>
<version>${cxf.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-rt-rs-client</artifactId>
<version>${cxf.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
What is causing this issue?
I realized that when the WebClient is created inside TomEE, it picks up bus properties provided by TomEE, which caused JsonWebKeysProvider not to be invoked.
In my case, below is the correct way to create the client inside TomEE:
JAXRSClientFactoryBean sf = new JAXRSClientFactoryBean();
sf.setAddress("https://www.googleapis.com/oauth2/v3/certs");
sf.setProvider(new JsonWebKeysProvider());
sf.setBus(new ExtensionManagerBus());
Calling sf.setBus(new ExtensionManagerBus()) ensures TomEE-provided values/properties aren't picked up.
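From there the client can be obtained and used as in the original snippet (a sketch; JAXRSClientFactoryBean.createWebClient() is the factory method I'd expect here, so treat it as an assumption):
WebClient client = sf.createWebClient().accept(MediaType.APPLICATION_JSON); // built on the fresh bus, not TomEE's
JsonWebKeys keys = client.get(JsonWebKeys.class);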

Apache Beam 2.2 dependency not able to get data from Cloud Storage

This is my code to read a CSV:
//DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
PipelineOptions options=PipelineOptionsFactory.create();
//options.setProject("ProjectId");
//options.setStagingLocation("gs://bucketname/Object");
options.setRunner(DirectRunner.class);
options.setTempLocation("gs://bucketname/Object");
Pipeline p = Pipeline.create(options);
p.apply(FileIO.match().filepattern("gs://bucketname/objectname.csv")).apply(FileIO.readMatches())
.apply(ParDo.of(new checkSplitter()));
p.run();
}
static class checkSplitter extends DoFn<ReadableFile, String> {
/**
*
*/
private static final long serialVersionUID = 1L;
int rown = 1;
String line;
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
try (InputStream is = Channels.newInputStream(c.element().open())) {
BufferedReader bReader = new BufferedReader(new InputStreamReader(is));
while ((line = bReader.readLine()) != null) {
System.out.println(line);
}
}
}
}
My pom.xml:
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.1.0</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-core</artifactId>
<version>2.2.0</version>
</dependency>
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-runners-direct-java</artifactId>
<version>2.2.0</version>
<scope>runtime</scope>
</dependency>
<!-- slf4j API frontend binding with JUL backend -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.7</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-jdk14</artifactId>
<version>1.7.7</version>
</dependency>
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-pubsub</artifactId>
<version>v1-rev12-1.20.0</version>
<exclusions>
<exclusion>
<artifactId>guava-jdk5</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.googlecode.json-simple/json-simple -->
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.9</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.cloud/google-cloud-bigquery -->
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-bigquery</artifactId>
<version>0.30.0-beta</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.7.5</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-io -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-io</artifactId>
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>com.google.appengine.tools</groupId>
<artifactId>appengine-gcs-client</artifactId>
<version>0.6</version>
</dependency>
<!-- <dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.14</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-jdk14</artifactId>
<version>1.7.14</version>
</dependency> -->
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>20.0</version>
</dependency>
The error is:
Exception in thread "main" org.apache.beam.sdk.Pipeline$PipelineExecutionException: java.nio.file.InvalidPathException: Illegal char <:> at index 2: gs://bucketname/object.csv
at org.apache.beam.runners.direct.DirectRunner$DirectPipelineResult.waitUntilFinish(DirectRunner.java:344)
at org.apache.beam.runners.direct.DirectRunner$DirectPipelineResult.waitUntilFinish(DirectRunner.java:314)
at org.apache.beam.runners.direct.DirectRunner.run(DirectRunner.java:208)
at org.apache.beam.runners.direct.DirectRunner.run(DirectRunner.java:62)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:303)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:289)
at com.pearson.dataflow.StarterPipeline.main(StarterPipeline.java:107)
Caused by: java.nio.file.InvalidPathException: Illegal char <:> at index 2: gs://bucketname/object.csv
at sun.nio.fs.WindowsPathParser.normalize(WindowsPathParser.java:176)
at sun.nio.fs.WindowsPathParser.parse(WindowsPathParser.java:147)
at sun.nio.fs.WindowsPathParser.parse(WindowsPathParser.java:77)
at sun.nio.fs.WindowsPath.parse(WindowsPath.java:94)
at sun.nio.fs.WindowsFileSystem.getPath(WindowsFileSystem.java:255)
at java.nio.file.Paths.get(Paths.java:84)
at org.apache.beam.sdk.io.LocalFileSystem.matchOne(LocalFileSystem.java:219)
at org.apache.beam.sdk.io.LocalFileSystem.match(LocalFileSystem.java:89)
at org.apache.beam.sdk.io.FileSystems.match(FileSystems.java:125)
at org.apache.beam.sdk.io.FileSystems.match(FileSystems.java:147)
at org.apache.beam.sdk.io.FileSystems.match(FileSystems.java:159)
at org.apache.beam.sdk.io.FileIO$MatchAll$MatchFn.process(FileIO.java:341)
I have attached the code and pom.xml along with the error.
Also, many built-in packages from 2.1 cannot be found in 2.2. Please advise me, or if there is a working example that runs locally, please share the link.
If I have missed any dependencies, or there is an error in my code, please help me.
Help will be appreciated. Thanks in advance.
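A hedged guess based on the stack trace: the gs:// pattern is being matched by LocalFileSystem, which suggests no GCS filesystem is registered on the classpath. Adding Beam's Google Cloud Platform IO module (artifact name as published for 2.2.0; whether it resolves your case is an assumption) may register it:
<dependency>
<groupId>org.apache.beam</groupId>
<artifactId>beam-sdks-java-io-google-cloud-platform</artifactId>
<version>2.2.0</version>
</dependency>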
