An error occurs when I query an Oracle database using Flink SQL CDC - apache-flink

My environment is as follows:
Linux: CentOS 7 (a virtual machine on VMware Workstation Pro)
Oracle: 11.2.0.4 (archive log mode already enabled)
Flink: 1.13.6
Flink CDC connector: 2.2.0
Java: 1.8
I tried to query data from an Oracle database using Flink SQL CDC in IDEA, but I ran into errors (ORA-00604 and ORA-12705, discussed below).
My Java code is as follows:
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class FlinkSQL_CDC_JDBC_Oracle {

    public static StreamTableEnvironment getTableEnvironment() {
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(sEnv, settings);
        return tableEnvironment;
    }

    public static String buildSourceTable() {
        String sql = "CREATE TABLE `a` (\n" +
                "  ID BIGINT,\n" +
                "  NAME VARCHAR,\n" +
                "  PRIMARY KEY(NAME) NOT ENFORCED )\n" +
                "  WITH (\n" +
                "  'connector' = 'oracle-cdc',\n" +
                // Replace with the actual IP address / hostname of the Oracle server
                "  'hostname' = 'hadoop104',\n" +
                "  'port' = '1521',\n" +
                "  'username' = 'oracle',\n" +
                "  'password' = 'oracle',\n" +
                "  'database-name' = 'ORCL',\n" +
                "  'schema-name' = 'oracle',\n" +
                "  'table-name' = 'A',\n" +
                //"  'scan.startup.mode' = 'latest-offset',\n" +
                "  'debezium.log.mining.strategy' = 'online_catalog',\n" +
                "  'debezium.database.tablename.case.insensitive' = 'false',\n" +
                "  'debezium.log.mining.continuous.mine' = 'true'\n" +
                ")";
        return sql;
    }

    public static void test() {
        StreamTableEnvironment tableEnvironment = getTableEnvironment();
        tableEnvironment.executeSql(buildSourceTable());
        tableEnvironment.executeSql("select * from a").print();
    }

    public static void main(String[] args) {
        test();
    }
}
My pom file is:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.example</groupId>
    <artifactId>Flink_Oracle</artifactId>
    <version>1.0</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <flink.version>1.13.6</flink.version>
        <scala.binary.version>2.11</scala.binary.version>
        <!--<mysql.version>5.7.16</mysql.version>-->
        <gson.version>2.8.6</gson.version>
    </properties>

    <dependencies>
        <!--<dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-shaded-guava</artifactId>
            <version>18.0-7.0</version>
        </dependency>-->
        <!--<dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-cdc-base</artifactId>
            <version>2.3.0</version>
        </dependency>-->
        <dependency>
            <groupId>com.ververica</groupId>
            <artifactId>flink-connector-oracle-cdc</artifactId>
            <version>2.2.0</version>
        </dependency>
        <!--<dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-jdbc_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/src/main/resources/libs/flink-connector-jdbc_2.11-1.13.6.jar</systemPath>
        </dependency>-->
        <!--<dependency>
            <groupId>com.oracle.database.jdbc</groupId>
            <artifactId>ojdbc6</artifactId>
            <version>11.2.0.4</version>
        </dependency>-->
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-core -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <type>test-jar</type>
        </dependency>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>${gson.version}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.0.0</version>
                <configuration>
                    <createDependencyReducedPom>false</createDependencyReducedPom>
                </configuration>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <transformers>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <!-- If you package the job, change this to the corresponding main class -->
                                    <mainClass>com.flink.cdc.demo.MysqlCdcMysql</mainClass>
                                </transformer>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                                    <resource>reference.conf</resource>
                                </transformer>
                            </transformers>
                            <filters>
                                <filter>
                                    <artifact>*:*:*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
I searched the Internet for a solution to error code ORA-00604, and the answer was that the SYSTEM tablespace was insufficient; however, the problem persisted after I enlarged the SYSTEM tablespace.
I searched the Internet for a solution to error code ORA-12705, and the answer was that the NLS_LANG variable was configured incorrectly; however, my NLS_LANG variable is configured correctly (NLS_LANG=AMERICAN_AMERICA.AL32UTF8), and the NLS_LANG value in the registry matches the one on the Oracle database. Curiously, everything works when I use Flink SQL JDBC to query the same Oracle database.
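For reference, one suggestion I found for ORA-12705 (I cannot confirm it applies to this setup) is that the Oracle thin driver derives its NLS settings from the client JVM's default locale rather than from the server, so forcing a locale the driver recognizes before the job starts may help. A minimal sketch, reusing the test() method from the code above (plain JDK API, nothing Flink-specific):
import java.util.Locale;

public static void main(String[] args) {
    // Assumption: force a locale the Oracle driver can map to valid NLS settings,
    // before any connection is opened. Equivalent to starting the JVM with
    // -Duser.language=en -Duser.country=US.
    Locale.setDefault(Locale.US);
    test();
}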
Finally, I really don't know what to do to solve this problem.
I hope someone can help me, if possible through remote desktop; I am willing to pay a reward in return.

Related

Flink TableEnvironment.create throws NoSuchMethodError

I am testing the Flink Hive connector, following the instructions here: https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/connectors/table/hive/overview/.
The final code is as follows. I tried to run it in the IntelliJ IDE. Unfortunately, it doesn't work: TableEnvironment.create throws NoSuchMethodError.
public static void main(String[] args) throws Exception {
    EnvironmentSettings settings = EnvironmentSettings.inStreamingMode();
    TableEnvironment tableEnv = TableEnvironment.create(settings); // throws NoSuchMethodError

    String name = "myhive";
    String defaultDatabase = "default";
    String hiveConfDir = "/Users/gaoxiahong/apache-hive-3.1.2-bin/conf";

    HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
    tableEnv.registerCatalog(name, hive);
    tableEnv.useCatalog(name);

    System.out.println(tableEnv.executeSql("show tables"));
}
Exception message is as follows:
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.calcite.sql.parser.SqlParser.config()Lorg/apache/calcite/sql/parser/SqlParser$Config;
at org.apache.flink.table.planner.delegation.PlannerContext.lambda$getSqlParserConfig$1(PlannerContext.java:263)
at java.util.Optional.orElseGet(Optional.java:267)
at org.apache.flink.table.planner.delegation.PlannerContext.getSqlParserConfig(PlannerContext.java:257)
at org.apache.flink.table.planner.delegation.PlannerContext.createFrameworkConfig(PlannerContext.java:148)
at org.apache.flink.table.planner.delegation.PlannerContext.<init>(PlannerContext.java:130)
at org.apache.flink.table.planner.delegation.PlannerBase.<init>(PlannerBase.scala:116)
at org.apache.flink.table.planner.delegation.StreamPlanner.<init>(StreamPlanner.scala:62)
at org.apache.flink.table.planner.delegation.DefaultPlannerFactory.create(DefaultPlannerFactory.java:64)
at org.apache.flink.table.factories.PlannerFactoryUtil.createPlanner(PlannerFactoryUtil.java:52)
at org.apache.flink.table.api.internal.TableEnvironmentImpl.create(TableEnvironmentImpl.java:302)
at org.apache.flink.table.api.TableEnvironment.create(TableEnvironment.java:93)
at com.yqg.flinkhive.Test.main(Test.java:18)
My Flink version is 1.15.2 and my Hive version is 3.1.2. The pom.xml file looks like:
<properties>
    <flink.version>1.15.2</flink.version>
    <hive.version>3.1.2</hive.version>
    <scala.version>2.12</scala.version>
</properties>
<dependencies>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-hive_${scala.version}</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-api-java-bridge</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
        <version>${hive.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-table-planner_${scala.version}</artifactId>
        <version>${flink.version}</version>
        <scope>provided</scope>
    </dependency>
</dependencies>
Can anyone help me figure out the issue here? Thanks in advance~
Per https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/dev/configuration/overview/ you most likely need to swap flink-table-api-java-bridge to flink-table-api-scala-bridge_2.12.
See also https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/connectors/table/hive/overview/#program-maven
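Concretely, that swap would look something like this in the pom above (a sketch; the version and scope are kept from the existing dependency block, and ${scala.version} resolves to 2.12 there):
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-api-scala-bridge_${scala.version}</artifactId>
    <version>${flink.version}</version>
    <scope>provided</scope>
</dependency>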

Liquibase: 'dropForeignKeyConstrant' is not a valid element of a ChangeSet

Get this error:
Error setting up or running Liquibase: ChangeSet '02': 'dropForeignKeyConstrant' is not a valid element of a ChangeSet
pom.xml plugin (the properties file contains only the DB credentials):
<plugin>
    <groupId>org.liquibase</groupId>
    <artifactId>liquibase-maven-plugin</artifactId>
    <version>3.10.2</version>
    <dependencies>
        <dependency>
            <groupId>org.liquibase</groupId>
            <artifactId>liquibase-groovy-dsl</artifactId>
            <version>2.1.2</version>
        </dependency>
        <dependency>
            <groupId>org.postgresql</groupId>
            <artifactId>postgresql</artifactId>
            <version>42.2.15</version>
        </dependency>
    </dependencies>
    <configuration>
        <propertyFile>src/main/resources/liquibase.properties</propertyFile>
    </configuration>
</plugin>
ChangeSet (.groovy):
rollback {
    dropUniqueConstraint(schemaName: schema_name, tableName: 'bucket', constraintName: 'u_bucket_name')
    dropUniqueConstraint(schemaName: schema_name, tableName: 'version', constraintName: 'u_bucket_key')
    dropForeignKeyConstrant(baseTableSchemaName: schema_name, baseTableName: 'file', constraintName: 'file_type_fk')
    dropForeignKeyConstrant(baseTableSchemaName: schema_name, baseTableName: 'license', constraintName: 'file_fk')
}
As you can see, dropUniqueConstraint worked, but dropForeignKeyConstrant didn't.
Why did I get this error, and how can I fix it?
dropForeignKeyConstrant was written instead of dropForeignKeyConstraint (the "i" is missing). Thanks for your attention, @andi.
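So the corrected rollback calls, with only the method name changed and the same arguments as above, would be:
dropForeignKeyConstraint(baseTableSchemaName: schema_name, baseTableName: 'file', constraintName: 'file_type_fk')
dropForeignKeyConstraint(baseTableSchemaName: schema_name, baseTableName: 'license', constraintName: 'file_fk')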

Quarkus + Panache + RestEasy Native Image build fails

I have the following simplified setup:
1)
import javax.ws.rs.GET;
import javax.ws.rs.Path;

@Path("/api")
public class MyResource {

    public MyResource() {
    }

    @GET
    @Path("/myPath/")
    public void get() {
    }
}
2)
import io.quarkus.hibernate.orm.panache.PanacheEntity;
import javax.persistence.Entity;

@Entity
public class MyEntity extends PanacheEntity {

    public String hello;

    public MyEntity() {
        // For Panache only
    }
}
3) pom.xml:
[...]
<properties>
    <compiler-plugin.version>3.8.1</compiler-plugin.version>
    <maven.compiler.source>11</maven.compiler.source>
    <maven.compiler.target>11</maven.compiler.target>
    <quarkus-plugin.version>1.5.0.Final</quarkus-plugin.version>
    <quarkus.platform.artifact-id>quarkus-universe-bom</quarkus.platform.artifact-id>
    <quarkus.platform.group-id>io.quarkus</quarkus.platform.group-id>
    <quarkus.platform.version>1.5.0.Final</quarkus.platform.version>
</properties>
<dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>${quarkus.platform.group-id}</groupId>
            <artifactId>${quarkus.platform.artifact-id}</artifactId>
            <version>${quarkus.platform.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>
<dependencies>
    <dependency>
        <groupId>io.quarkus</groupId>
        <artifactId>quarkus-hibernate-orm-panache</artifactId>
    </dependency>
    <dependency>
        <groupId>io.quarkus</groupId>
        <artifactId>quarkus-jdbc-mariadb</artifactId>
    </dependency>
    <dependency>
        <groupId>io.quarkus</groupId>
        <artifactId>quarkus-resteasy</artifactId>
    </dependency>
</dependencies>
<build>
    <plugins>
        <plugin>
            <groupId>io.quarkus</groupId>
            <artifactId>quarkus-maven-plugin</artifactId>
            <version>${quarkus-plugin.version}</version>
            <executions>
                <execution>
                    <goals>
                        <goal>build</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
<profiles>
    <profile>
        <id>native</id>
        <activation>
            <property>
                <name>native</name>
            </property>
        </activation>
        <build/>
        <properties>
            <quarkus.package.type>native</quarkus.package.type>
        </properties>
    </profile>
</profiles>
</project>
4) application.properties
quarkus.datasource.db-kind = mariadb
quarkus.datasource.username = admin
quarkus.datasource.password = admin
quarkus.datasource.jdbc.url = jdbc:mariadb://localhost:5432/mydatabase
quarkus.hibernate-orm.database.generation = drop-and-create
When I run this with the native maven profile (mvn clean package -Pnative) I get:
Fatal error: com.oracle.graal.pointsto.util.AnalysisError$ParsingError: Error encountered while parsing com.oracle.svm.reflect.Class_getNestHost_d0409f1154f6242e625526eadd05fbcd60e7d7e9.invoke(java.lang.Object, java.lang.Object[])
Parsing context:
parsing java.lang.reflect.Method.invoke(Method.java:566)
parsing javax.enterprise.util.AnnotationLiteral.invoke(AnnotationLiteral.java:288)
parsing javax.enterprise.util.AnnotationLiteral.getMemberValue(AnnotationLiteral.java:276)
parsing javax.enterprise.util.AnnotationLiteral.hashCode(AnnotationLiteral.java:246)
parsing org.graalvm.collections.EconomicMapImpl.getHashIndex(EconomicMapImpl.java:414)
[...]
Caused by: com.oracle.svm.hosted.substitute.DeletedElementException: Unsupported method java.lang.Class.getNestHost() is reachable: The declaring class of this element has been substituted, but this element is not present in the substitution class
[...]
To diagnose the issue, you can add the option --report-unsupported-elements-at-runtime.
Running it with --report-unsupported-elements-at-runtime didn't help much either.
When I delete the MyEntity class, it successfully compiles to a native executable on my Mac with graalvm-ce-java11-20.0.0.
Any idea what's wrong here?
I updated to 20.0.0 and everything works perfectly. The only way I found to hit that error is when GraalVM is not set up properly. The environment variables on my Mac are:
export GRAALVM_HOME=/Library/Java/JavaVirtualMachines/graalvm-ce-java11-20.0.0/Contents/Home
export JAVA_HOME=${GRAALVM_HOME}
export PATH=${GRAALVM_HOME}/bin:$PATH
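After exporting those, a quick sanity check (plain JDK and Maven commands, nothing Quarkus-specific) is to confirm that both the shell and Maven actually pick up the GraalVM JDK:
# both should report a GraalVM CE Java 11 build
java -version
mvn -version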
Let me know if that works for you.

Apache Beam 2.2 dependency not able to get the data from Cloud Storage

This is my code to read a CSV:
// DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
PipelineOptions options = PipelineOptionsFactory.create();
// options.setProject("ProjectId");
// options.setStagingLocation("gs://bucketname/Object");
options.setRunner(DirectRunner.class);
options.setTempLocation("gs://bucketname/Object");

Pipeline p = Pipeline.create(options);
p.apply(FileIO.match().filepattern("gs://bucketname/objectname.csv"))
        .apply(FileIO.readMatches())
        .apply(ParDo.of(new checkSplitter()));
p.run();
}

static class checkSplitter extends DoFn<ReadableFile, String> {

    private static final long serialVersionUID = 1L;

    int rown = 1;
    String line;

    @ProcessElement
    public void processElement(ProcessContext c) throws Exception {
        try (InputStream is = Channels.newInputStream(c.element().open())) {
            BufferedReader bReader = new BufferedReader(new InputStreamReader(is));
            while ((line = bReader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}
My pom.xml:
<dependency>
    <groupId>javax.servlet</groupId>
    <artifactId>javax.servlet-api</artifactId>
    <version>3.1.0</version>
    <scope>provided</scope>
</dependency>
<dependency>
    <groupId>org.apache.beam</groupId>
    <artifactId>beam-sdks-java-core</artifactId>
    <version>2.2.0</version>
</dependency>
<dependency>
    <groupId>org.apache.beam</groupId>
    <artifactId>beam-runners-direct-java</artifactId>
    <version>2.2.0</version>
    <scope>runtime</scope>
</dependency>
<!-- slf4j API frontend binding with JUL backend -->
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-api</artifactId>
    <version>1.7.7</version>
</dependency>
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-jdk14</artifactId>
    <version>1.7.7</version>
</dependency>
<dependency>
    <groupId>com.google.apis</groupId>
    <artifactId>google-api-services-pubsub</artifactId>
    <version>v1-rev12-1.20.0</version>
    <exclusions>
        <exclusion>
            <artifactId>guava-jdk5</artifactId>
            <groupId>com.google.guava</groupId>
        </exclusion>
    </exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.code.gson/gson -->
<dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <version>2.8.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.googlecode.json-simple/json-simple -->
<dependency>
    <groupId>com.googlecode.json-simple</groupId>
    <artifactId>json-simple</artifactId>
    <version>1.1.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
<dependency>
    <groupId>commons-codec</groupId>
    <artifactId>commons-codec</artifactId>
    <version>1.9</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.cloud/google-cloud-bigquery -->
<dependency>
    <groupId>com.google.cloud</groupId>
    <artifactId>google-cloud-bigquery</artifactId>
    <version>0.30.0-beta</version>
</dependency>
<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-databind</artifactId>
    <version>2.7.5</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-io -->
<dependency>
    <groupId>org.apache.commons</groupId>
    <artifactId>commons-io</artifactId>
    <version>1.3.2</version>
</dependency>
<dependency>
    <groupId>com.google.appengine.tools</groupId>
    <artifactId>appengine-gcs-client</artifactId>
    <version>0.6</version>
</dependency>
<!--<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-api</artifactId>
    <version>1.7.14</version>
</dependency>
<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-jdk14</artifactId>
    <version>1.7.14</version>
</dependency>-->
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
<dependency>
    <groupId>com.google.guava</groupId>
    <artifactId>guava</artifactId>
    <version>20.0</version>
</dependency>
The error is:
Exception in thread "main" org.apache.beam.sdk.Pipeline$PipelineExecutionException: java.nio.file.InvalidPathException: Illegal char <:> at index 2: gs://bucketname/object.csv
at org.apache.beam.runners.direct.DirectRunner$DirectPipelineResult.waitUntilFinish(DirectRunner.java:344)
at org.apache.beam.runners.direct.DirectRunner$DirectPipelineResult.waitUntilFinish(DirectRunner.java:314)
at org.apache.beam.runners.direct.DirectRunner.run(DirectRunner.java:208)
at org.apache.beam.runners.direct.DirectRunner.run(DirectRunner.java:62)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:303)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:289)
at com.pearson.dataflow.StarterPipeline.main(StarterPipeline.java:107)
Caused by: java.nio.file.InvalidPathException: Illegal char <:> at index 2: gs://bucketname/object.csv
at sun.nio.fs.WindowsPathParser.normalize(WindowsPathParser.java:176)
at sun.nio.fs.WindowsPathParser.parse(WindowsPathParser.java:147)
at sun.nio.fs.WindowsPathParser.parse(WindowsPathParser.java:77)
at sun.nio.fs.WindowsPath.parse(WindowsPath.java:94)
at sun.nio.fs.WindowsFileSystem.getPath(WindowsFileSystem.java:255)
at java.nio.file.Paths.get(Paths.java:84)
at org.apache.beam.sdk.io.LocalFileSystem.matchOne(LocalFileSystem.java:219)
at org.apache.beam.sdk.io.LocalFileSystem.match(LocalFileSystem.java:89)
at org.apache.beam.sdk.io.FileSystems.match(FileSystems.java:125)
at org.apache.beam.sdk.io.FileSystems.match(FileSystems.java:147)
at org.apache.beam.sdk.io.FileSystems.match(FileSystems.java:159)
at org.apache.beam.sdk.io.FileIO$MatchAll$MatchFn.process(FileIO.java:341)
I have attached the code and pom.xml along with the error.
Many built-in packages from 2.1 can no longer be found in 2.2. Please advise me, or if there is a working example that runs locally, please share the link.
Did I miss any dependencies, or is there an error in the code? Please help me.
Any help will be appreciated.
Thanks in advance.
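For what it's worth, the stack trace shows org.apache.beam.sdk.io.LocalFileSystem trying to handle the gs:// path, which usually means no GCS filesystem is registered on the classpath. In Beam 2.x, Google Cloud Storage support lives in a separate module, so the dependency below is likely what's missing (a suggestion based on the Maven coordinates for Beam 2.2.0, not verified against this exact project):
<dependency>
    <groupId>org.apache.beam</groupId>
    <artifactId>beam-sdks-java-extensions-google-cloud-platform-core</artifactId>
    <version>2.2.0</version>
</dependency>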

Unable to load OWL File using OWL API

Please help me out with this:
I have used Maven and tried loading an ontology file using the OWL API.
I am getting errors while running the file:
1st error:
No implementation for java.util.Set was bound.
  while locating java.util.Set for parameter 0
  at uk.ac.manchester.cs.owl.owlapi.OWLOntologyManagerImpl.setOntologyStorers(OWLOntologyManagerImpl.java:1279)
  at uk.ac.manchester.cs.owl.owlapi.OWLAPIImplModule.configure(Unknown Source)
2nd error:
An exception was caught and reported. Message:
org.semanticweb.owlapi.manchestersyntax.parser.ManchesterOWLSyntaxOntologyParserFactory cannot be cast to javax.inject.Provider
  at org.semanticweb.owlapi.OWLAPIServiceLoaderModule.configure(Unknown Source)
My code looks like:
File selectedFile = new File("E:\\Pallavi\\Ontology\\Food.owl");
OWLOntologyManager m = OWLManager.createOWLOntologyManager();
IRI inputDocumentIRI = IRI.create(selectedFile);
/* Load an ontology from a document IRI */
OWLOntology ontology = m.loadOntologyFromOntologyDocument(inputDocumentIRI);
/* Report information about the ontology */
System.out.println("Ontology Loaded...");
System.out.println("Document IRI: " + inputDocumentIRI);
System.out.println("Logical IRI : " + ontology.getOntologyID());
System.out.println("Format : " + m.getOntologyFormat(ontology));
m.removeOntology(ontology);
System.out.println("Done");
My pom.xml looks like:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.mycompany</groupId>
    <artifactId>TestOWL</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.felix</groupId>
                <artifactId>maven-bundle-plugin</artifactId>
                <version>2.5.3</version>
                <extensions>true</extensions>
                <configuration>
                    <instructions>
                        <Implementation-Title>${project.name}</Implementation-Title>
                        <Implementation-Vendor>${project.organization.name}</Implementation-Vendor>
                        <Implementation-Version>${project.version}.${maven.build.timestamp}</Implementation-Version>
                        <Bundle-SymbolicName>org.semanticweb.owl.owlapi</Bundle-SymbolicName>
                        <Bundle-Version>${project.version}</Bundle-Version>
                        <excludeDependencies>groupId=com.google.guava;scope=compile|runtime|provided,
                            groupId=com.google.inject*;scope=compile|runtime|provided,
                            groupId=org.slf4j*;scope=compile|runtime|provided</excludeDependencies>
                    </instructions>
                </configuration>
            </plugin>
            <plugin>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.3</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <configuration>
                            <artifactSet>
                                <excludes>
                                    <exclude>org.apache.felix:org.osgi.core</exclude>
                                    <exclude>org.openrdf.sesame:*</exclude>
                                    <exclude>com.fasterxml.jackson.core:*</exclude>
                                    <exclude>com.github.jsonld-java:*</exclude>
                                    <exclude>com.fasterxml.jackson.core:*</exclude>
                                    <exclude>org.apache.httpcomponents:*</exclude>
                                    <exclude>commons-codec:commons-codec:*</exclude>
                                    <exclude>org.slf4j:*</exclude>
                                    <exclude>org.semarglproject:*</exclude>
                                    <exclude>com.google.guava:*</exclude>
                                    <exclude>com.google.inject:*</exclude>
                                    <exclude>javax.inject:*</exclude>
                                    <exclude>aopalliance:*</exclude>
                                    <exclude>com.google.inject.extensions:*</exclude>
                                    <exclude>com.google.code.findbugs:*</exclude>
                                    <exclude>org.slf4j:slf4j-api</exclude>
                                    <exclude>commons-io:*</exclude>
                                    <exclude>org.tukaani:*</exclude>
                                    <exclude>net.sf.trove4j:*</exclude>
                                </excludes>
                            </artifactSet>
                            <transformers>
                                <transformer/>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>com.github.ansell.owlapi</groupId>
            <artifactId>owlapi-api</artifactId>
            <version>3.4.6.2-ansell</version>
        </dependency>
        <dependency>
            <groupId>net.sourceforge.owlapi</groupId>
            <artifactId>owlapi-apibinding</artifactId>
            <version>5.0.5</version>
        </dependency>
        <dependency>
            <groupId>net.sourceforge.owlapi</groupId>
            <artifactId>owlapi-osgidistribution</artifactId>
            <version>5.0.5</version>
        </dependency>
    </dependencies>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>
</project>
Please help me get rid of these errors.
You are excluding necessary dependencies for the OWL API, which explains all the injection-related errors.
On top of that, you're using OWL API 5 and the Ansell fork of OWL API 3. These will conflict in many areas.
If you are not using OSGi (it seems you are not), drop all dependencies except owlapi-apibinding 5.0.5 and remove all exclusions. If that does not solve the problem, update the question with the new state of affairs.
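In other words, the dependency section would shrink to just this (version taken from the pom above):
<dependencies>
    <dependency>
        <groupId>net.sourceforge.owlapi</groupId>
        <artifactId>owlapi-apibinding</artifactId>
        <version>5.0.5</version>
    </dependency>
</dependencies>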
