Executing a Lucene BooleanQuery on a huge file fails for terms late in the file

I have a problem with my huge N-Quads file (about 4000 lines) when I execute a BooleanQuery.
I build the query like this:
Query query1 = new TermQuery(new Term(FIELD_CONTENTS, "Albania"));
Query query2 = new TermQuery(new Term(FIELD_CONTENTS, "Hitchcock"));
BooleanQuery booleanQuery = new BooleanQuery();
booleanQuery.add(query1, BooleanClause.Occur.MUST);
booleanQuery.add(query2, BooleanClause.Occur.MUST);
This query works correctly when the words I search for appear before roughly line 780 of the file; for words that appear after line 780 the search finds nothing.
This is a snippet of my nquad file:
<http://dbpedia.org/resource/A_Clockwork_Orange> <http://dbpedia.org/ontology/numberOfPages> "192"^^<http://www.w3.org/2001/XMLSchema#positiveInteger> <http://en.wikipedia.org/wiki/A_Clockwork_Orange?oldid=606117686#absolute-line=12> .
I wrote a custom analyzer to filter out unwanted tokens:
import java.io.Reader;
import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
class TestAnalyzer1 extends Analyzer {
public static final String[] TEST_STOP_WORDS = { "http", "https",
"resource", "foaf/0.1", "dbpedia.org", "en.wikipedia.org",
"xmlns.com", "purl.org", "elements/1.1",
"www.w3.org/2001/XMLSchema", "www.w3.org/1999/02/22-rdf",
"www.w3.org/2003/01", "oldid", "wiki" };
#SuppressWarnings("rawtypes")
private Set stopWords = StopFilter.makeStopSet(TEST_STOP_WORDS);
public TokenStream tokenStream(String fieldName, Reader reader) {
TokenStream ts = new StandardTokenizer(reader);
ts = new StandardFilter(ts);
ts = new StopFilter(ts, stopWords);
return ts;
}
}
This is main class:
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Iterator;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Hit;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;
#SuppressWarnings("deprecation")
public class TestPreFinal {
public static final String FILES_TO_INDEX_DIRECTORY = "filesToIndex_1";
public static final String INDEX_DIRECTORY = "indexDirectory";
public static final String FIELD_PATH = "path";
public static final String FIELD_CONTENTS = "contents";
public static void main(String[] args) throws CorruptIndexException,
LockObtainFailedException, IOException, ParseException {
long startTime = System.currentTimeMillis();
Analyzer analyzer = new TestAnalyzer1();
IndexWriter indexWriter = new IndexWriter(INDEX_DIRECTORY, analyzer,
true);
File dir = new File(FILES_TO_INDEX_DIRECTORY);
File[] files = dir.listFiles();
for (File file : files) {
Reader reader = new FileReader(file);
Document document = new Document();
String path = file.getCanonicalPath();
Field fieldPath = new Field(FIELD_PATH, path, Field.Store.YES,
Field.Index.UN_TOKENIZED);
Field fieldContents = new Field(FIELD_CONTENTS, reader,
Field.TermVector.WITH_POSITIONS_OFFSETS);
document.add(fieldPath);
document.add(fieldContents);
indexWriter.addDocument(document);
}
indexWriter.commit();
indexWriter.close();
Directory directory = FSDirectory.getDirectory(INDEX_DIRECTORY);
IndexSearcher indexSearcher = new IndexSearcher(directory);
IndexReader indexReader = IndexReader.open(directory);
Query query1 = new TermQuery(new Term(FIELD_CONTENTS, "Albania"));
Query query2 = new TermQuery(new Term(FIELD_CONTENTS, "Hitchcock"));
BooleanQuery booleanQuery = new BooleanQuery();
booleanQuery.add(query1, BooleanClause.Occur.MUST);
booleanQuery.add(query2, BooleanClause.Occur.MUST);
Hits hits = indexSearcher.search(booleanQuery);
#SuppressWarnings({ "unchecked" })
Iterator<Hit> it = hits.iterator();
TermFreqVector tfv = null;
while (it.hasNext()) {
Hit hit = it.next();
Document document = hit.getDocument();
String path = document.get(FIELD_PATH);
System.out.println("Hit: " + path);
}
for (int i = 0; i < hits.length(); i++) {
tfv = indexReader.getTermFreqVector(i, FIELD_CONTENTS);
System.out.println(tfv);
}
}
}
I do not know what else to do. You can help please. Thanks in advance.

Related

Quarkus Multiple File Upload

Hi, I'm trying to upload multiple files using a multipart form.
I use the code below, but I get a Bad Request status. How can I upload multiple files?
public class AttachmentBody {
#FormParam("files")
#PartType(MediaType.APPLICATION_OCTET_STREAM)
public InputStream[] files;
}
I was working in a part, I thought it would be helpful for multiple files upload. I am using RestEasy and Quarkus framework. Find below the code.
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.jboss.resteasy.annotations.providers.multipart.MultipartForm;
import org.jboss.resteasy.plugins.providers.multipart.InputPart;
import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput;
#Path("/multiupload")
public class MultiFileUploadController {
private static String UPLOAD_DIR = "E:/sure-delete";
#POST
#Path("/files")
#Consumes(MediaType.MULTIPART_FORM_DATA)
#Produces(MediaType.TEXT_PLAIN)
public Response handleFileUploadForm(#MultipartForm MultipartFormDataInput input) {
Map<String, List<InputPart>> uploadForm = input.getFormDataMap();
List<String> fileNames = new ArrayList<>();
List<InputPart> inputParts = uploadForm.get("file");
System.out.println("inputParts size: " + inputParts.size());
String fileName = null;
for (InputPart inputPart : inputParts) {
try {
MultivaluedMap<String, String> header = inputPart.getHeaders();
fileName = getFileName(header);
fileNames.add(fileName);
System.out.println("File Name: " + fileName);
InputStream inputStream = inputPart.getBody(InputStream.class, null);
byte[] bytes = IOUtils.toByteArray(inputStream);
File customDir = new File(UPLOAD_DIR);
fileName = customDir.getAbsolutePath() + File.separator + fileName;
Files.write(Paths.get(fileName), bytes, StandardOpenOption.CREATE_NEW);
} catch (Exception e) {
e.printStackTrace();
}
}
String uploadedFileNames = String.join(", ", fileNames);
return Response.ok().entity("All files " + uploadedFileNames + " successfully.").build();
}
private String getFileName(MultivaluedMap<String, String> header) {
String[] contentDisposition = header.getFirst("Content-Disposition").split(";");
for (String filename : contentDisposition) {
if ((filename.trim().startsWith("filename"))) {
String[] name = filename.split("=");
String finalFileName = name[1].trim().replaceAll("\"", "");
return finalFileName;
}
}
return "unknown";
}
}
To test from the postman client, find below the image.
You can take it as an example also handle the exception.

Apache beam 2.1.0: Unable to upload to Datastore after following example

I'm having trouble uploading entities to the Cloud Datastore via the Apache Beam Java SDK (2.1.0). The following is my code:
import com.google.cloud.datastore.DatastoreOptions
import com.google.cloud.datastore.Entity
import com.opencsv.CSVParser
import org.apache.beam.runners.dataflow.DataflowRunner
import
org.apache.beam.runners.dataflow.options.DataflowPipelineOptions
import org.apache.beam.sdk.Pipeline
import org.apache.beam.sdk.io.TextIO
import org.apache.beam.sdk.io.gcp.datastore.DatastoreIO
import org.apache.beam.sdk.options.PipelineOptionsFactory
import org.apache.beam.sdk.transforms.DoFn
import org.apache.beam.sdk.transforms.MapElements
import org.apache.beam.sdk.transforms.ParDo
import org.apache.beam.sdk.transforms.SimpleFunction
import java.io.Serializable
object PipelineClass {
    // Simple serializable DTO parsed from each CSV line.
    class FoodGroup(var id: String? = null,
                    var group: String? = null) : Serializable

    // Parses one CSV line into a FoodGroup (col 0 = id, col 1 = group).
    class CreateGroupsFn : SimpleFunction<String, FoodGroup>() {
        override fun apply(line: String?): FoodGroup {
            val group = FoodGroup()
            val parser = CSVParser()
            val parts = parser.parseLine(line)
            group.id = parts[0].trim()
            group.group = parts[1].trim()
            return group
        }
    }

    // NOTE(review): the reported type mismatch ("DatastoreV1.Write! but
    // PTransform<in PCollection<Entity!>!, PDone!>! was expected") is caused
    // by the Entity import: this file imports com.google.cloud.datastore.Entity
    // (the google-cloud client library), but DatastoreIO.v1().write() expects a
    // PCollection of com.google.datastore.v1.Entity (the protobuf type). Switch
    // the import and build the entity with the v1 protobuf builders.
    class CreateEntitiesFn : DoFn<FoodGroup, Entity>() {
        @ProcessElement
        fun processElement(c: ProcessContext) {
            // NOTE(review): creating a Datastore client and allocating an id
            // per element is expensive; consider building keys locally instead.
            val datastore = DatastoreOptions.getDefaultInstance().service
            val keyFactory = datastore.newKeyFactory()
                    .setKind("FoodGroup")
                    .setNamespace("nutrients")
            val key = datastore.allocateId(keyFactory.newKey())
            val entity = Entity.newBuilder(key)
                    .set("id", c.element().id)
                    .set("group", c.element().group)
                    .build()
            c.output(entity)
        }
    }

    @JvmStatic fun main(args: Array<String>) {
        val options =
                PipelineOptionsFactory.`as`(DataflowPipelineOptions::class.java)
        options.runner = DataflowRunner::class.java
        options.project = "simplesample"
        options.jobName = "fgUpload"
        val pipeline = Pipeline.create(options)
        pipeline.apply(TextIO.read().from("gs://bucket/foodgroup.csv"))
                .apply(MapElements.via(CreateGroupsFn()))
                .apply(ParDo.of(CreateEntitiesFn()))
                .apply(DatastoreIO.v1().write()
                        .withProjectId(options.project))
        pipeline.run()
    }
}
The following is the error I get:
PipelineClass.kt: (75, 24): Type mismatch: inferred type is
DatastoreV1.Write! but PTransform<in PCollection<Entity!>!, PDone!>!
was expected
I've tried SimpleFunction, DoFn, and PTransform (composite and non-composite) to do the transform from String to Entity with no success.
What am I doing wrong?
EDIT: I've finally managed to get my entities in the Datastore. I decided to use Dataflow 1.9.1 and ditched Beam (2.1.0) after seeing this example. Below is my code:
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.io.datastore.DatastoreIO;
import com.google.cloud.dataflow.sdk.options.Default;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.transforms.ParDo;
import com.google.datastore.v1.Entity;
import com.google.datastore.v1.Key;
import com.opencsv.CSVParser;
import javax.annotation.Nullable;
import java.util.UUID;
import static com.google.datastore.v1.client.DatastoreHelper.makeKey;
import static
com.google.datastore.v1.client.DatastoreHelper.makeValue;
public class PipelineClass {
static class CreateEntitiesFn extends DoFn<String, Entity> {
private final String namespace;
private final String kind;
private final Key ancestorKey;
CreateEntitiesFn(String namespace, String kind) {
this.namespace = namespace;
this.kind = kind;
ancestorKey = makeAncestorKey(namespace, kind);
}
Entity makeEntity(String id, String group) {
Entity.Builder entityBuilder = Entity.newBuilder();
Key.Builder keyBuilder = makeKey(ancestorKey, kind,
UUID.randomUUID().toString());
if (namespace != null) {
keyBuilder.getPartitionIdBuilder().setNamespaceId(namespace);
}
entityBuilder.setKey(keyBuilder.build());
entityBuilder.getMutableProperties().put("id",
makeValue(id).build());
entityBuilder.getMutableProperties().put("group",
makeValue(group).build());
return entityBuilder.build();
}
#Override
public void processElement(ProcessContext c) throws Exception {
CSVParser parser = new CSVParser();
String[] parts = parser.parseLine(c.element());
String id = parts[0];
String group = parts[1];
c.output(makeEntity(id, group));
}
}
static Key makeAncestorKey(#Nullable String namespace, String kind) {
Key.Builder keyBuilder = makeKey(kind, "root");
if (namespace != null) {
keyBuilder.getPartitionIdBuilder().setNamespaceId(namespace);
}
return keyBuilder.build();
}
public interface Options extends PipelineOptions {
#Description("Path of the file to read from and store to Cloud
Datastore")
#Default.String("gs://bucket/foodgroup.csv")
String getInput();
void setInput(String value);
#Description("Dataset ID to read from Cloud Datastore")
#Default.String("simplesample")
String getProject();
void setProject(String value);
#Description("Cloud Datastore Entity Kind")
#Default.String("FoodGroup")
String getKind();
void setKind(String value);
#Description("Dataset namespace")
#Default.String("nutrients")
String getNamespace();
void setNamespace(#Nullable String value);
#Description("Number of output shards")
#Default.Integer(0)
int getNumShards();
void setNumShards(int value);
}
public static void main(String args[]) {
PipelineOptionsFactory.register(Options.class);
Options options =
PipelineOptionsFactory.fromArgs(args).as(Options.class);
Pipeline p = Pipeline.create(options);
p.apply(TextIO.Read.named("ReadLines").from(options.getInput()))
.apply(ParDo.named("CreateEntities").of(new
CreateEntitiesFn(options.getNamespace(), options.getKind())))
.apply(DatastoreIO.v1().write().withProjectId(options.getProject()));
p.run();
}
}

JavaFX How can I make a line chart out of an observable list?

I have been playing around with the line chart tutorial here: http://docs.oracle.com/javase/8/javafx/user-interface-tutorial/line-chart.htm#CIHGBCFI
I want to extend the tutorial and attempt to build a line chart out of data from a database, instead of setting data like this:
series.getData().add(new XYChart.Data(1, 23));
series.getData().add(new XYChart.Data(2, 14));
What I have to import data from database:
/**
 * Loads (cost, date) rows from the Items table, ordered by date ascending.
 *
 * @return an observable list of Items, or {@code null} if the query fails
 *         (kept for caller compatibility; callers must null-check)
 */
public ObservableList<Items> loadChart() {
    // try-with-resources guarantees the Statement and ResultSet are closed
    // even when an exception is thrown (the originals leaked on error).
    try (Statement stmt = connection.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT cost, date FROM Items ORDER BY date ASC")) {
        ObservableList<Items> data = FXCollections.observableArrayList();
        while (rs.next()) {
            Items items = new Items();
            items.setCost(rs.getInt(1));
            items.setDate(rs.getString(2));
            data.add(items);
        }
        return data;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}
I want to plot cost and date against in a line chart... Yet don't know how to do so (based on observable list and the tutorial).
If you are using that sample, your code should look something like this.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.chart.LineChart;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.XYChart;
import javafx.stage.Stage;
public class LineChartSample extends Application {
#Override public void start(Stage stage) {
stage.setTitle("Line Chart Sample");
//defining the axes
final NumberAxis xAxis = new NumberAxis();
final NumberAxis yAxis = new NumberAxis();
xAxis.setLabel("Number of Month");
//creating the chart
final LineChart<Number,Number> lineChart =
new LineChart<Number,Number>(xAxis,yAxis);
lineChart.setTitle("Stock Monitoring, 2010");
//defining a series
XYChart.Series series = new XYChart.Series();
series.setName("My portfolio");
//populating the series with data
try
{
Connection connection = DriverManager.getConnection("...");//You can use try with resources. Establish a Connection
Statement stmt = connection.createStatement();//Create Statement
ResultSet rs = stmt.executeQuery("SELECT cost, date FROM Items ORDER BY date ASC");//Query DB and get results.
//Iterate through results.
while(rs.next())
{
series.getData().add(new XYChart.Data(rs.getInt(1), Integer.parseInt(rs.getString(2))));//Add data to Chart. Changed the second input to Integer due to LineChart<Number,Number>. This should work, though I haven't tested it.
}
}
catch (SQLException ex) {
Logger.getLogger(LineChartSample.class.getName()).log(Level.SEVERE, null, ex);
}
Scene scene = new Scene(lineChart,800,600);
lineChart.getData().add(series);
stage.setScene(scene);
stage.show();
}
public static void main(String[] args) {
launch(args);
}
}
UPDATE BELOW
You can try something like this if you want to have a database handler class.
DBHandler Class
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.scene.chart.XYChart;
/**
 * Loads (cost, date) pairs from the Items table at construction time and
 * exposes them as a list of two-element [cost, date] integer lists.
 *
 * @author blj0011
 */
public class DBHandler
{
    // Each inner list is [cost, date-as-int], in date-ascending order.
    // There is probably a better way to structure this DBHandler Class.
    List<List<Integer>> dataHolder;

    public DBHandler()
    {
        dataHolder = new ArrayList<>();
        // try-with-resources closes the Connection/Statement/ResultSet even
        // when the query fails (the originals leaked on error).
        try (Connection connection = DriverManager.getConnection("...");
             Statement stmt = connection.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT cost, date FROM Items ORDER BY date ASC"))
        {
            // Iterate through results.
            while(rs.next())
            {
                List<Integer> tempDataHolder = new ArrayList<>();
                tempDataHolder.add(rs.getInt(1));
                // NOTE(review): assumes the date column holds numeric text;
                // parsing a real date string would throw.
                tempDataHolder.add(Integer.parseInt(rs.getString(2)));
                dataHolder.add(tempDataHolder);
            }
        }
        catch (SQLException ex) {
            Logger.getLogger(DBHandler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** @return the loaded [cost, date] pairs (empty if the query failed) */
    public List<List<Integer>> getDataHolder()
    {
        return dataHolder;
    }
}
Main Class
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.chart.LineChart;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.XYChart;
import javafx.stage.Stage;
public class LineChartSample extends Application {
#Override public void start(Stage stage) {
stage.setTitle("Line Chart Sample");
//defining the axes
final NumberAxis xAxis = new NumberAxis();
final NumberAxis yAxis = new NumberAxis();
xAxis.setLabel("Number of Month");
//creating the chart
final LineChart<Number,Number> lineChart =
new LineChart<Number,Number>(xAxis,yAxis);
lineChart.setTitle("Stock Monitoring, 2010");
//defining a series
XYChart.Series series = new XYChart.Series();
series.setName("My portfolio");
//populating the series with data
DBHandler dbHandler = new DBHandler();
List<List<Integer>> dataHolder = dbHandler.getDataHolder();
for(int i = 0; i < dataHolder.size(); i++)
{
series.getData().add(new XYChart.Data(dataHolder.get(i).get(0), dataHolder.get(i).get(1)));
}
Scene scene = new Scene(lineChart,800,600);
lineChart.getData().add(series);
stage.setScene(scene);
stage.show();
}
public static void main(String[] args) {
launch(args);
}
}

Selenium Webdriver - Apache POI - XSSF - 2nd loop is not getting executed

I'm using Apache POI to read data from multiple Excel files, but my code doesn't pick up the values from the second workbook — execution never even enters the second FOR loop.
Excel 1 – Test Data to login
Excel 2 – Course Details
My Login credentials is present in the 1st excel, It has 5 data in it. I would like to login with that user and add course for that user. Appreciate your help on this!!
Note # please suggest if you have any concerns on this piece of code
Below is the code that I’m using
package M1.Testcases;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.PageFactory;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import Helper.BrowserFactory;
import M1.Pages.Add_Course;
import M1.Pages.LoginPage;
public class CopyOfVerifyAdd_Course
{
WebDriver driver;
#Test
#Parameters("browser")
public void checkValidUser(String browser) throws IOException, InterruptedException
{
try
{
FileInputStream file = new FileInputStream(new File("File1Path"));
XSSFWorkbook workbook = new XSSFWorkbook(file);
XSSFSheet sheet = workbook.getSheetAt(0);
FileInputStream file1 = new FileInputStream(new File("File2Path"));
XSSFWorkbook workbook1 = new XSSFWorkbook(file1);
XSSFSheet sheet1 = workbook1.getSheetAt(0);
String url= "1st URL";
for (int i=1; i <= sheet.getLastRowNum(); i++)
{
String uid = sheet.getRow(i).getCell(0).getStringCellValue();
String pass = sheet.getRow(i).getCell(1).getStringCellValue();
WebDriver driver=BrowserFactory.startBrowser(browser,url);
LoginPage login_page=PageFactory.initElements(driver, LoginPage.class);
login_page.login_MI(uid,pass);
driver.get("2nd URL");
driver.manage().timeouts().implicitlyWait(10000, TimeUnit.MILLISECONDS);
for (int j=1; j <= sheet1.getLastRowNum(); j++)
{
Thread.sleep(10000);
String course_title = sheet1.getRow(j).getCell(0).getStringCellValue();
String course_code = sheet1.getRow(j).getCell(1).getStringCellValue();
String course_cat_desc = sheet1.getRow(j).getCell(2).getStringCellValue();
String course_full_desc = sheet1.getRow(j).getCell(3).getStringCellValue();
String course_price = sheet1.getRow(j).getCell(4).getStringCellValue();
String course_target_audience = sheet1.getRow(j).getCell(5).getStringCellValue();
String course_learner_assess = sheet1.getRow(j).getCell(6).getStringCellValue();
String course_copyright = sheet1.getRow(j).getCell(7).getStringCellValue();
String course_release_date = sheet1.getRow(j).getCell(8).getStringCellValue();
String course_review_date = sheet1.getRow(j).getCell(9).getStringCellValue();
String course_expiration_date = sheet1.getRow(j).getCell(10).getStringCellValue();
String course_state_req = sheet1.getRow(j).getCell(11).getStringCellValue();
String course_bibi = sheet1.getRow(j).getCell(12).getStringCellValue();
Add_Course add_course_page=PageFactory.initElements(driver, Add_Course.class);
driver.switchTo().frame(course_cat_desc);
driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
driver.switchTo().frame(course_full_desc);
driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
driver.switchTo().frame(course_target_audience);
driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
driver.switchTo().frame(course_learner_assess);
driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
driver.switchTo().frame(course_state_req);
driver.manage().timeouts().implicitlyWait(2000, TimeUnit.MILLISECONDS);
driver.switchTo().frame(course_bibi);
driver.manage().timeouts().implicitlyWait(5000, TimeUnit.MILLISECONDS);
add_course_page.AddCourse(course_title,course_code,course_cat_desc,course_full_desc,course_price,course_target_audience,course_learner_assess,
course_copyright,course_release_date,course_review_date,course_expiration_date,course_state_req,course_bibi);
Thread.sleep(10000);
}
driver.manage().deleteAllCookies();
driver.quit();
}
workbook.close();
file.close();
workbook1.close();
file1.close();
}
catch (FileNotFoundException fnfe)
{
fnfe.printStackTrace();
}
}
}

MULE HTTP listener attachment with rest web service

I am new to Mule ESB. I am trying to handle a file attachment with an HTTP listener that forwards to a RESTful web service.
I created a simple flow but don't know how to pass the attachment through Mule to the RESTful web service.
Any help is greatly appreciated!
:(
Below is the simple flow that I am assuming should work.
rest web service code ::
package com.one.file;
import java.io.File;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemFactory;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
#Path("/upload")
public class RESTMultipleFileUpload {
private static final String FILE_UPLOAD_PATH = "C:\\Users\\charan\\Documents\\webservice\\";
//private static final String CANDIDATE_NAME = "candidateName";
private static final String SUCCESS_RESPONSE = "Successful";
private static final String FAILED_RESPONSE = "Failed";
#POST
#Consumes(MediaType.MULTIPART_FORM_DATA)
#Produces("text/plain")
#Path("/multipleFiles")
public String registerWebService(#Context HttpServletRequest request)
{
String responseStatus = SUCCESS_RESPONSE;
String candidateName = null;
System.out.println("first ");
System.out.println("Two::"+request);
//checks whether there is a file upload request or not
if (ServletFileUpload.isMultipartContent(request))
{
final FileItemFactory factory = new DiskFileItemFactory();
final ServletFileUpload fileUpload = new ServletFileUpload(factory);
try
{
System.out.println("t ");
/*
* parseRequest returns a list of FileItem
* but in old (pre-java5) style
*/
final List items = fileUpload.parseRequest(request);
if (items != null)
{
final Iterator iter = items.iterator();
while (iter.hasNext())
{
final FileItem item = (FileItem) iter.next();
final String itemName = item.getName();
final String fieldName = item.getFieldName();
final String fieldValue = item.getString();
if (item.isFormField())
{
candidateName = fieldValue;
System.out.println("Field Name: " + fieldName + ", Field Value: " + fieldValue);
System.out.println("Candidate Name: " + candidateName);
}
else
{
final File savedFile = new File(FILE_UPLOAD_PATH + File.separator
+ itemName);
System.out.println("Saving the file: " + savedFile.getName());
item.write(savedFile);
}
}
}
}
catch (FileUploadException fue)
{
responseStatus = FAILED_RESPONSE;
fue.printStackTrace();
}
catch (Exception e)
{
responseStatus = FAILED_RESPONSE;
e.printStackTrace();
}
}
System.out.println("Returned Response Status: " + responseStatus);
return responseStatus;
}
}

Resources