I have a trending application. The app gets a list of trends from Twitter every hour and puts them in the datastore (without checking for duplicates by name). Therefore, for a particular word such as "Thaipusam" I'll have many objects whose only difference is the date, which is calculated by long date = new Date().getTime(); whenever the background process pushes it to the datastore.
Each object is like this:
{
id: 4749890231992320,
name: "Thaipusam",
url: "http://twitter.com/search?q=Thaipusam",
query: "Thaipusam",
location: "Kuala Lumpur",
woeid: 1154781,
date: 1389865326440
}
Since I have over 5000 objects in the datastore, I need to know how many times each trend appeared per day.
What is the best/most efficient practice to calculate/query the repetition per day?
I don't know whether this is the most efficient way, but I did the following in my servlet and the result is satisfactory:
private static final long MILLIS_PER_DAY = 24 * 60 * 60 * 1000;
private static final long MILLIS_PER_WEEK = 7 * MILLIS_PER_DAY;
private static final long MILLIS_PER_MONTH = 30 * MILLIS_PER_DAY;
private static final long MILLIS_PER_YEAR = 365 * MILLIS_PER_DAY;
private static final String QUERY_PER_PERIOD = "SELECT m.date FROM TwitterTrendsJPA As m WHERE m.name = :keyword1 AND m.date >= :keyword2";
In the doGet() method:
long currentTime = new Date().getTime();
long last24hours = currentTime - MILLIS_PER_DAY;
long last7days = currentTime - MILLIS_PER_WEEK;
long last30days = currentTime - MILLIS_PER_MONTH;
long last365days = currentTime - MILLIS_PER_YEAR;
@SuppressWarnings("unchecked")
List<Long> statList;
Statistic statistic = new Statistic();
Query query1 = em.createQuery(QUERY_PER_PERIOD);
query1.setParameter("keyword1", p2);
query1.setParameter("keyword2", last24hours);
statList = (List<Long>) query1.getResultList();
statistic.setPerDay(statList);
Query query2 = em.createQuery(QUERY_PER_PERIOD);
query2.setParameter("keyword1", p2);
query2.setParameter("keyword2", last7days);
statList = (List<Long>) query2.getResultList();
statistic.setPerWeek(statList);
Query query3 = em.createQuery(QUERY_PER_PERIOD);
query3.setParameter("keyword1", p2);
query3.setParameter("keyword2", last30days);
statList = (List<Long>) query3.getResultList();
statistic.setPerMonth(statList);
Query query4 = em.createQuery(QUERY_PER_PERIOD);
query4.setParameter("keyword1", p2);
query4.setParameter("keyword2", last365days);
statList = (List<Long>) query4.getResultList();
statistic.setPerYear(statList);
And the Statistic class looks like this:
private final class Statistic {
private List<Long> perDay;
private List<Long> perWeek;
private List<Long> perMonth;
private List<Long> perYear;
public List<Long> getPerDay() {
return perDay;
}
public void setPerDay(List<Long> perDay) {
this.perDay = perDay;
}
public List<Long> getPerWeek() {
return perWeek;
}
public void setPerWeek(List<Long> perWeek) {
this.perWeek = perWeek;
}
public List<Long> getPerMonth() {
return perMonth;
}
public void setPerMonth(List<Long> perMonth) {
this.perMonth = perMonth;
}
public List<Long> getPerYear() {
return perYear;
}
public void setPerYear(List<Long> perYear) {
this.perYear = perYear;
}
}
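Since only the counts per period are needed, it may be cheaper to let the datastore count the rows instead of returning every timestamp. A minimal sketch, assuming the same TwitterTrendsJPA entity and that your JPA provider supports aggregate queries (the helper name countSince is mine):
private static final String COUNT_PER_PERIOD =
        "SELECT COUNT(m) FROM TwitterTrendsJPA AS m WHERE m.name = :keyword1 AND m.date >= :keyword2";

// Returns how many entries for the given trend name have a date >= since.
private long countSince(EntityManager em, String name, long since) {
    TypedQuery<Long> query = em.createQuery(COUNT_PER_PERIOD, Long.class);
    query.setParameter("keyword1", name);
    query.setParameter("keyword2", since);
    return query.getSingleResult();
}
Calling it four times with last24hours, last7days, last30days and last365days gives the four counts without transferring the date lists at all.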
I am working on the following program and have set a WatermarkStrategy; however, when I run the program using the inEventTime() method on the pattern, it does not give any output.
Note: the same program works when I use inProcessingTime() on the pattern.
public class FlinkCEPTest {
@SuppressWarnings("deprecation")
public static void main(String[] args) throws Exception {
ParameterTool parameter = ParameterTool.fromArgs(args);
final String bootstrapServers = parameter.get("kafka.broker", "localhost:9092,broker:29092");
final String inputTopic_1 = parameter.get("input.topic.1","acctopic");
final String inputTopic_2 = parameter.get("input.topic.2","txntopic");
final String outputTopic = parameter.get("output.topic.q","alerttopic");
final String groupID = parameter.get("group.id","flink-demo-grp-id");
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
KafkaSource<EventMessage> source_1 = KafkaSource.<EventMessage>builder()
.setBootstrapServers(bootstrapServers)
.setTopics(inputTopic_1).setGroupId(groupID)
.setStartingOffsets(OffsetsInitializer.latest())
.setDeserializer(new EventSchema())
.build();
DataStream<EventMessage> text_1 = env.fromSource(source_1,
WatermarkStrategy
.<EventMessage>forBoundedOutOfOrderness(Duration.ofSeconds(300))
.withTimestampAssigner((event, trtimestamp)-> {
//System.err.println("Kafka ingetstion ts : " + trtimestamp);
//System.err.println("Event ts : "+ event.getTxnDate().getTime());
return event.getTxnDate().getTime();})
, "Kafka Source 1");
DataStream<EventMessage> partitionedInput = text_1.keyBy(evt -> evt.getAccountId());
//partitionedInput.print();
Pattern<EventMessage, ?> relaxedAlarmPattern = Pattern.<EventMessage>begin("first").subtype(EventMessage.class)
.where(new SimpleCondition<EventMessage>() {
private static final long serialVersionUID = 1L;
@Override
public boolean filter(EventMessage value) throws Exception {
return value.getEvent().equalsIgnoreCase("PASSWORD_CHANGE_SUCC");
}
}).followedBy("second").subtype(EventMessage.class).where(new IterativeCondition<EventMessage>() {
private static final long serialVersionUID = 1L;
@Override
public boolean filter(EventMessage value, Context<EventMessage> ctx) throws Exception {
Iterable<EventMessage> test = ctx.getEventsForPattern("first");
Integer accid = 0;
for (EventMessage te : test) {
accid = te.getAccountId();
}
return value.getEvent().equalsIgnoreCase("BENIFICIARY_ADDED")
&& value.getAccountId().equals(accid);
}
}).followedBy("third").subtype(EventMessage.class).where(new IterativeCondition<EventMessage>() {
private static final long serialVersionUID = 1L;
@Override
public boolean filter(EventMessage value, Context<EventMessage> ctx) throws Exception {
Integer accid = 0;
Iterable<EventMessage> test = ctx.getEventsForPattern("first");
for (EventMessage te : test) {
accid = te.getAccountId();
}
return value.getEvent().equalsIgnoreCase("TXN_NEW")
&& value.getAccountId().equals(accid) && value.getAmt() <= 10;
}
}).followedBy("last").subtype(EventMessage.class).where(new IterativeCondition<EventMessage>() {
private static final long serialVersionUID = 1L;
@Override
public boolean filter(EventMessage value, Context<EventMessage> ctx) throws Exception {
Integer accid = 0;
Iterable<EventMessage> test = ctx.getEventsForPattern("first");
for (EventMessage te : test) {
accid = te.getAccountId();
}
return value.getEvent().equalsIgnoreCase("TXN_NEW")
&& value.getAccountId().equals(accid) && value.getAmt() >= 100 ;
}
}).within(Time.seconds(300));
PatternStream<EventMessage> patternStream = CEP.pattern(partitionedInput, relaxedAlarmPattern)
.inEventTime();
//.inProcessingTime();
DataStream<String> alarms = patternStream.select(new PatternSelectFunction<EventMessage, String>() {
private static final long serialVersionUID = 1L;
@Override
public String select(Map<String, List<EventMessage>> pattern) throws Exception {
EventMessage first = (EventMessage) pattern.get("first").get(0);
EventMessage middle = (EventMessage) pattern.get("second").get(0);
EventMessage third = (EventMessage) pattern.get("third").get(0);
EventMessage last = (EventMessage) pattern.get("last").get(0);
return "WARNING : Possible fraud scenario [ Party ID " + first.getPartyId()
+ " recently changed his password and added a beneficiary and later made transcations of "
+ third.getAmt() + " and " + last.getAmt()+" ]";
}
});
alarms.print();
env.execute(" CEP ");
}
}
If I change the following line
PatternStream<EventMessage> patternStream = CEP.pattern(partitionedInput, relaxedAlarmPattern).inEventTime();
To
PatternStream<EventMessage> patternStream = CEP.pattern(partitionedInput, relaxedAlarmPattern).inProcessingTime();
The code works. Any suggestions on how I can make it work with the inEventTime() method?
Usually with Kafka sources the issue is that the parallelism is higher than the number of partitions, or not all partitions receive data, which doesn't let the watermarks advance. You can solve this by adjusting the parallelism or by using withIdleness with your watermark strategy, as sketched below.
See more info in the Kafka connector docs.
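For the code above, that would mean marking partitions idle after some period without data; a minimal sketch, assuming the same source and timestamp assigner (the 30-second idle timeout is an arbitrary choice):
DataStream<EventMessage> text_1 = env.fromSource(source_1,
        WatermarkStrategy
                .<EventMessage>forBoundedOutOfOrderness(Duration.ofSeconds(300))
                .withTimestampAssigner((event, trtimestamp) -> event.getTxnDate().getTime())
                // let watermarks advance even while some partitions receive no data
                .withIdleness(Duration.ofSeconds(30)),
        "Kafka Source 1");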
I am trying to generate aggregates on a streaming source, and when I try to run the Table API queries I am getting the following error:
AppendStreamTableSink doesn't support consuming update changes which is produced by node GroupAggregate
I am consuming the data from a Kafka topic. Here is the sample data I use to mimic that behavior:
msg_type_1,Site_1,09/10/2020,00:00:00.037
msg_type_2,Site_1,09/10/2020,00:00:00.037
msg_type_1,Site_2,09/10/2020,00:00:00.037
msg_type_1,Site_3,09/10/2020,00:00:00.037
msg_type_1,Site_4,09/10/2020,00:00:00.037
msg_type_1,Site_5,09/10/2020,00:00:00.037
msg_type_1,Site_1,09/10/2020,00:00:00.037
msg_type_2,Site_1,09/10/2020,00:00:00.037
msg_type_3,Site_2,09/10/2020,00:00:00.037
msg_type_4,Site_1,09/10/2020,00:10:00.037
msg_type_1,Site_3,09/10/2020,00:10:00.037
msg_type_2,Site_1,09/10/2020,00:10:00.037
msg_type_3,Site_4,09/10/2020,00:10:00.037
msg_type_4,Site_1,09/10/2020,00:10:00.037
msg_type_1,Site_4,09/10/2020,00:10:00.037
msg_type_2,Site_5,09/10/2020,00:10:00.037
msg_type_4,Site_5,09/10/2020,00:10:00.037
msg_type_6,Site_5,09/10/2020,00:10:00.037
And here is the unit test I have for the aggregation:
@Test
public void loadSampleMessageFile() {
System.out.println(".loadSampleMessageFile() : ");
try {
String[] args = {};
StreamExecutionEnvironment streamingExecutionEnv = null;
streamingExecutionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
streamingExecutionEnv.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
//streamingExecutionEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
ExecutionConfig config = streamingExecutionEnv.getConfig();
final ParameterTool paramTool = ParameterTool.fromArgs(args);
for (int index = 0; index < args.length; index++) {
logger.info("Argument =" + index + " Value" + args[index]);
}
streamingExecutionEnv.getConfig().setGlobalJobParameters(paramTool);
StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamingExecutionEnv);
SingleOutputStreamOperator<SampleMessage> dataStreamSource = streamingExecutionEnv
.readTextFile("C:\\temp\\sample_data.txt")
.map(new MapFunction<String, SampleMessage>() {
@Override
public SampleMessage map(String value) throws Exception {
return sampleMessageParser.parseMessage(value, null);
}
});
streamTableEnv.createTemporaryView("messages", dataStreamSource);
Table messagesTable = streamTableEnv.fromDataStream(dataStreamSource);
System.out.println("No.of Columns in Table =" + messagesTable.getSchema().getFieldCount());
logger.info("No.of Columns in Table =" + messagesTable.getSchema().getFieldCount());
for (int index = 0; index < messagesTable.getSchema().getFieldNames().length; index++) {
System.out.println("Field Name [" + index + "] = " + messagesTable.getSchema().getFieldNames()[index]);
}
TableResult distinctSiteResult = messagesTable.distinct().select($("site")).execute();
CloseableIterator distinctSiteResultIter = distinctSiteResult.collect();
int counter = 0;
List<String> sites = new ArrayList<>();
while (distinctSiteResultIter.hasNext()) {
sites.add((String) distinctSiteResultIter.next());
counter++;
}
System.out.println("Total No.of Distinct Sites =" + counter);
}
catch(Exception e){
e.printStackTrace();
}
}
And the supporting class:
public class SampleMessage implements Serializable {
private String msgType;
private String site;
private Long timestamp;
public String getMsgType() {
return msgType;
}
public void setMsgType(String msgType) {
this.msgType = msgType;
}
public String getSite() {
return site;
}
public void setSite(String site) {
this.site = site;
}
public Long getTimestamp() {
return timestamp;
}
public void setTimestamp(Long timestamp) {
this.timestamp = timestamp;
}
public String toString(){
StringBuilder str = new StringBuilder();
str.append("SampleMessage[");
str.append(" msgType=");
str.append(msgType);
str.append(" site=");
str.append(site);
str.append(" timestamp=");
str.append(timestamp);
str.append(" ]");
return str.toString();
}
}
And here is the error I am getting:
.loadSampleMessageFile() :
No.of Columns in Table =3
Field Name [0] = msgType
Field Name [1] = site
Field Name [2] = timestamp
org.apache.flink.table.api.TableException: AppendStreamTableSink doesn't support consuming update changes which is produced by node GroupAggregate(groupBy=[msgType, site, timestamp], select=[msgType, site, timestamp])
Check which version of Flink you are on. The result of distinct changes continuously, so the downstream must be a RetractStreamTableSink. The error shows that collect in your version of Flink does not support upserts; the latest version of Flink's collect already supports them.
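One way around this on older versions is to consume the continuously updating result as a retract stream instead of collecting it; a minimal sketch, assuming the streamTableEnv and messagesTable from the test above:
// Each element is (true, row) for an insertion and (false, row) for a retraction.
Table distinctSites = messagesTable.distinct().select($("site"));
DataStream<Tuple2<Boolean, Row>> retractStream =
        streamTableEnv.toRetractStream(distinctSites, Row.class);
retractStream.print();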
Codename One does not allow me to use DecimalFormat. So how do I work around this code:
private final static DecimalFormat balanceFormatter = new DecimalFormat("0.00");
private final String GetWalletBalanceEndpoint = "wallet/balance";
public GetWalletBalance() {
super();
endpoint = GetWalletBalanceEndpoint;
}
public final Double getBalance() {
Map<String, Object> balanceMap = getResponseData();
Double balance = (Double) balanceMap.get("balance");
return balance;
}
public String getBalance2DecimalsString() {
Double balance = getBalance();
return balanceFormatter.format(balance);
}
You can format decimals in Codename One using the L10NManager:
private final L10NManager lnm = L10NManager.getInstance();
private final String GetWalletBalanceEndpoint = "wallet/balance";
public GetWalletBalance() {
super();
endpoint = GetWalletBalanceEndpoint;
}
public final String getBalance() {
Map<String, Object> balanceMap = getResponseData();
return lnm.format(Double.parseDouble(balanceMap.get("balance").toString()), 2);
}
Note that values will be formatted based on the device localization: some countries write 10.34 as 10,34, and that format will be followed. You can force a particular country's format by setting the locale before formatting:
lnm.setLocale("US", "EN");
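Putting the two calls together, a short usage sketch (the sample value 10.345 is just for illustration):
L10NManager lnm = L10NManager.getInstance();
lnm.setLocale("US", "EN");                 // optional: pin formatting to US English
String formatted = lnm.format(10.345, 2);  // format to two decimal places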
I have a Visualforce page which is used as a view to send a custom SMS to leads generated in Salesforce. The year field is a number field in Salesforce. I am posting the controller, the test class, and the error; the error line is mentioned below.
Controller :-
//Class to send Message to Lead or Account
public class nmSendSMS
{
//Name of Lead or Account
public string strName{get;set;}
public Lead objLead {get;set;}
public String defaultNumbersToBeAdded;
public String leadYear{get;set;}
//Mobile number of lead of account
public string strMobile{get;set;}
public String messageToBeSent{get;set;}
public List<SelectOption> getYear{get;set;}
public string strSMSBody{get;set;}
//To disable fields if data is not available
String session,statusOfLead,stringOfMobileNumbers;
public nmSendSMS()
{
objLead = new Lead();
leadYear ='';
defaultNumbersToBeAdded = '9820834921,9920726538';
messageToBeSent = '';
stringOfMobileNumbers= '';
}
//Method to send SMS
public PageReference SendSMS()
{
if(leadYear!='' || messageToBeSent!='')
{
session = objLead.nm_Session__c;
integer lengthOfCommaSeperatedNumbers;
String finalString ='';
statusOfLead = objLead.Status;
list<lead> leadNumbersList = [select MobilePhone from Lead where Status=:statusOfLead and nm_Session__c=:session and nm_Year__c=:integer.valueOf(leadYear)];
for(Lead obj :leadNumbersList)
{
stringOfMobileNumbers = obj.MobilePhone+','+stringOfMobileNumbers;
}
System.debug('stringOfMobileNumbers -->'+stringOfMobileNumbers);
lengthOfCommaSeperatedNumbers = stringOfMobileNumbers.length();
finalString = stringOfMobileNumbers.substring(0,lengthOfCommaSeperatedNumbers-1);
finalString = finalString + defaultNumbersToBeAdded;
System.debug('Final String--->'+finalString+'Message To Be Sent-->'+messageToBeSent);
String response = SMSSenderWebService.sendSMSForNotContactedLead(finalString,messageToBeSent);
System.debug('Response-->'+response);
}
else
{
ApexPages.addmessage(new ApexPages.message(ApexPages.severity.Warning,'Please Mention all the fields For Lead Search'));
return null;
}
return null;
}
public List<SelectOption> getYear()
{
List<SelectOption> options = new List<SelectOption>();
options.add(new SelectOption('2016','2016'));
options.add(new SelectOption('2017','2017'));
options.add(new SelectOption('2018','2018'));
options.add(new SelectOption('2019','2019'));
return options;
}
}
Test Class:-
@isTest
public class nmSendSMSTracker
{
public static Lead obj;
public static nm_Centers__c objLearningCenter;
public static nm_Program__c program;
public static nm_EligiblityCriteria__c eligibility ;
static testMethod void tesMethod()
{
LoadData();
PageReference pg = new PageReference('/apex/nmSendSMS');
Test.setCurrentPage(pg);
Test.StartTest();
nmSendSMS smsSend = new nmSendSMS();
smsSend.getYear();
smsSend.objLead = obj;
smsSend.messageToBeSent ='Hello Vikas';
smsSend.SendSMS();
Test.StopTest();
}
static void LoadData()
{
nm_EscalationMatrix__c objCustomSeetings3 = new nm_EscalationMatrix__c();
objCustomSeetings3.name='0-1 Week';
objCustomSeetings3.nm_LCEscalationTime__c='22:45';
objCustomSeetings3.nm_RemidertoIC__c='22:45';
objCustomSeetings3.nm_HOEscalationTime__c='20:56';
objCustomSeetings3.nm_RemidertoHO__c='22:45';
insert objCustomSeetings3;
nm_EscalationMatrix__c objCustomSeetings = new nm_EscalationMatrix__c();
objCustomSeetings.name='2-4 Months';
objCustomSeetings.nm_LCEscalationTime__c='20:45';
objCustomSeetings.nm_RemidertoIC__c='21:45';
objCustomSeetings.nm_HOEscalationTime__c='20:56';
objCustomSeetings.nm_RemidertoHO__c='21:45';
insert objCustomSeetings;
nm_EscalationMatrix__c objCustomSeetings2 = new nm_EscalationMatrix__c();
objCustomSeetings2.name='3-6 Week';
objCustomSeetings2.nm_LCEscalationTime__c='20:34';
objCustomSeetings2.nm_RemidertoIC__c='21:45';
objCustomSeetings2.nm_HOEscalationTime__c='20:56';
objCustomSeetings2.nm_RemidertoHO__c='21:45';
insert objCustomSeetings2;
nm_Holidays__c objHoliday = new nm_Holidays__c();
objHoliday.Name='Holi';
objHoliday.nm_Date__c=system.today();
insert objHoliday;
// profile objprofile =[SELECT Id FROM Profile WHERE Name='System Administrator'];
user usr = [Select id from user limit 1];
SystemConfiguration__c objSystemConfiguration=new SystemConfiguration__c();
objSystemConfiguration.name='test';
objSystemConfiguration.nm_BusinessHoursStartTime__c='012213';
objSystemConfiguration.nm_BusinessHoursEndTime__c='0234533';
insert objSystemConfiguration;
Recordtype rt=[select id from Recordtype where sobjectType='nm_Centers__c' AND name ='Learning Center'];
objLearningCenter = new nm_Centers__c();
objLearningCenter.RecordTypeID =rt.id;
objLearningCenter.nm_CenterCode__c ='002';
objLearningCenter.nm_CenterCity__c='Delhi';
objLearningCenter.nm_City__c='Delhi';
objLearningCenter.nm_StateProvince__c='Delhi';
objLearningCenter.nm_Street__c='Laxmi Ngar';
objLearningCenter.nm_PostalCode__c='110091';
insert objLearningCenter;
program = new nm_Program__c();
program.nmIsActive__c = true;
program.nm_ProgramCode__c = 'test';
program.nm_ProgramDuration__c= 2.0;
program.nm_ProgramName__c = 'Post grad diploma finance';
program.nm_ProgramValidity__c = 4;
program.nm_TotalSemesters__c = 4;
program.nm_Type__c = 'Post Graduate Diploma Program';
insert program;
eligibility = new nm_EligiblityCriteria__c();
eligibility.Name = 'Bachelors degree';
eligibility.nm_EligiblityCriteria__c = 'bjhwbghbjgw';
eligibility.Experience_Required_In_Year__c= 2;
eligibility.Graduation_Percentage__c = 6;
eligibility.Graduation_Required__c = true;
insert eligibility;
obj = new Lead();
obj.Email='amit.kumar@saasfocus.com';
obj.MobilePhone='8377985721';
obj.FirstName='sandy';
obj.LastName='babar';
obj.nm_BloodGroup__c='B+';
obj.nm_Gender__c='male';
obj.nm_FathersName__c='subhash';
obj.nm_MothersName__c='kalpana';
obj.nm_StateProvince_P__c='maharashtra';
obj.nm_Nationality__c='Indian';
obj.nm_Street_P__c='xyz';
obj.nm_LocalityName__c='mohitep';
obj.nm_SelfLearningMaterial__c='Send to my shipping address';
obj.Status='Cold';
obj.nm_Session__c = 'January';
obj.nm_NameofBoard__c='CBSE';
obj.nm_EligiblityCriteria__c = eligibility.id;
obj.nm_Program__c = program.id;
obj.nm_InformationCenter__c =objLearningCenter.id;
obj.nm_10thPercentage__c=77.00;
obj.nm_NameofBoard__c='ICSC';
obj.nm_YearofCompletion__c='2000';
obj.nm_NameofSchool__c='nutan';
obj.nm_Class12OrDiploma__c='HSC';
obj.nm_NameofBoard12__c='LCSC';
obj.nm_YearofCompletion12__c='2002';
obj.nm_NameofSchool12__c='dfg';
obj.nm_Stream__c='adw';
obj.nm_BachelorsDegreeName__c='gfc';
obj.nm_Specialization__c='gf';
obj.nm_NameofUniversity__c='G K university';
obj.nm_BachelorsDegreePercentage__c=55.00;
obj.nm_GraduationDegreeMode__c='fgc';
obj.nm_YearofCollegeCompletion__c='2006';
obj.LeadSource='Web';
obj.OwnerId=usr.id;
insert obj;
}
}
Error Message:
System.TypeException: Invalid integer:
Class.nmSendSMS.SendSMS: line 37, column 1
Class.nmSendSMSTracker.tesMethod: line 19, column 1
List<Lead> leadNumbersList = [select MobilePhone from Lead where nm_Year__c=:leadYear];
This works for me and I get the correct Lead(s) in the list.
I have a load test for a WCF service, where we are trying out different compression libraries and configurations, and we need to measure the total number of MB sent during a test. Is there a performance counter that measures the relative traffic per test? If so, how do I add it to my load test? It seems that only a fraction of the performance counters are visible; e.g., under the category "Web Service", I don't see the performance counter "Total Bytes Received" in the VS load test, but I can find it in PerfMon.
In the load test, expand Counter Sets > expand the applicable set > right-click on Counter Sets > Add Counters.
You can implement your own custom performance counter like so:
using System;
using System.Diagnostics;
using System.Net.NetworkInformation;
namespace PerfCounter
{
class PerfCounter
{
private const String categoryName = "Custom category";
private const String counterName = "Total bytes received";
private const String categoryHelp = "A category for custom performance counters";
private const String counterHelp = "Total bytes received on network interface";
private const String lanName = "Local Area Connection"; // change this to match your network connection
private const int sampleRateInMillis = 1000;
private const int numberofSamples = 200000;
private static NetworkInterface lan = null;
private static PerformanceCounter perfCounter;
private static long initialReceivedBytes;
static void Main(string[] args)
{
setupLAN();
setupCategory();
createCounters();
updatePerfCounters();
}
private static void setupCategory()
{
if (!PerformanceCounterCategory.Exists(categoryName))
{
CounterCreationDataCollection counterCreationDataCollection = new CounterCreationDataCollection();
CounterCreationData totalBytesReceived = new CounterCreationData();
totalBytesReceived.CounterType = PerformanceCounterType.NumberOfItems64;
totalBytesReceived.CounterName = counterName;
counterCreationDataCollection.Add(totalBytesReceived);
// SingleInstance: the counter below is created without an instance name
PerformanceCounterCategory.Create(categoryName, categoryHelp, PerformanceCounterCategoryType.SingleInstance, counterCreationDataCollection);
}
else
Console.WriteLine("Category {0} exists", categoryName);
}
private static void createCounters()
{
perfCounter = new PerformanceCounter(categoryName, counterName, false);
perfCounter.RawValue = getTotalBytesReceived();
}
private static long getTotalBytesReceived()
{
return lan.GetIPv4Statistics().BytesReceived;
}
private static void setupLAN()
{
NetworkInterface[] interfaces = NetworkInterface.GetAllNetworkInterfaces();
foreach (NetworkInterface networkInterface in interfaces)
{
if (networkInterface.Name.Equals(lanName))
lan = networkInterface;
}
initialReceivedBytes = lan.GetIPv4Statistics().BytesReceived;
}
private static void updatePerfCounters()
{
for (int i = 0; i < numberofSamples; i++)
{
perfCounter.RawValue = getTotalBytesReceived();
Console.WriteLine("received: {0} bytes", perfCounter.RawValue - initialReceivedBytes);
System.Threading.Thread.Sleep(sampleRateInMillis);
}
}
}
}