Merge pull request #39 from qcri-social/staging

Merging staging to master (Release 1.6)

kushalkantgoyal committed Jul 14, 2015
2 parents 38fb7b3 + 8e4d412 commit 700e9b6
Showing 207 changed files with 4,758 additions and 3,408 deletions.
11 changes: 9 additions & 2 deletions .gitignore
@@ -23,14 +23,21 @@ local.properties
# OS X Generated Files
.DS_Store

# Backup files and directories
*.bak

# Compressed data
*.zip
*.tar
*.gz

#################
## Eclipse
#################

*.project
*.classpath
*.settings


# Locally stored "Eclipse launch configurations"
*.launch
*.launch

GetConfidenceStatistics.java
@@ -1,3 +1,4 @@

package qa.qcri.aidr.analysis.api;

import javax.ejb.EJB;
@@ -21,6 +22,14 @@
import qa.qcri.aidr.common.code.DateFormatConfig;
import qa.qcri.aidr.output.getdata.ChannelBufferManager;


/**
*
* This is the REST API interface for accessing the aidr_analytics DB's confidence_data entity.
*
* This class is not used at the moment.
*/

@Path("/confData/")
public class GetConfidenceStatistics extends GetStatistics implements ServletContextListener {

GetTagDataStatistics.java
@@ -1,16 +1,9 @@
package qa.qcri.aidr.analysis.api;

import java.util.Date;
import java.util.ArrayList;
package qa.qcri.aidr.analysis.api;

import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import javax.ejb.EJB;
import javax.json.Json;
import javax.json.JsonObject;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.ws.rs.DefaultValue;
@@ -26,30 +19,19 @@

import org.apache.log4j.Logger;

import com.fasterxml.jackson.databind.ObjectMapper;

import qa.qcri.aidr.common.code.FasterXmlWrapper;
import qa.qcri.aidr.analysis.dto.TagCountDTO;
import qa.qcri.aidr.analysis.dto.TagCountSeriesDTO;
import qa.qcri.aidr.analysis.dto.TimeWindowTagCountDTO;
import qa.qcri.aidr.analysis.dto.helper.TagCountDTOHelper;
import qa.qcri.aidr.analysis.dto.helper.TagCountSeriesDTOHelper;
import qa.qcri.aidr.analysis.dto.helper.TimeWindowTagCountDTOHelper;
import qa.qcri.aidr.analysis.entity.TagData;
import qa.qcri.aidr.analysis.entity.TagDataPK;
import qa.qcri.aidr.analysis.facade.TagDataStatisticsResourceFacade;
import qa.qcri.aidr.analysis.utils.JsonResponse;
import qa.qcri.aidr.analysis.service.GetTagDataStatisticsService;

/**
* This is the REST API interface for accessing the aidr_analytics DB's tag_data entity.
*/

@Path("/tagData/")
public class GetTagDataStatistics extends GetStatistics implements ServletContextListener {

// Debugging
private static Logger logger = Logger.getLogger(GetTagDataStatistics.class);

@EJB
private TagDataStatisticsResourceFacade tagDataEJB;

/**
*
* @param crisisCode
@@ -65,30 +47,8 @@ public Response getTagCountSumFromTime(@PathParam("crisisCode") String crisisCod
@PathParam("attributeCode") String attributeCode,
@PathParam("granularity") Long granularity,
@DefaultValue("0") @QueryParam("startTime") Long startTime) {

//long timeGranularity = DateFormatConfig.parseTime(granularity);
// First get the list of data points from DB
List<TagData> tagDataList = tagDataEJB.getDataAfterTimestampGranularity(crisisCode, attributeCode, null, startTime, granularity);

// Now the real work - count and send response
JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
json.put("startTime", new Date(startTime));
if (tagDataList != null) {
Map<String, Integer> tagCountMap = new TreeMap<String, Integer>();
for (TagData t: tagDataList) {
if (tagCountMap.containsKey(t.getLabelCode())) {
tagCountMap.put(t.getLabelCode(), tagCountMap.get(t.getLabelCode()) + t.getCount());
} else {
tagCountMap.put(t.getLabelCode(), t.getCount());
}
}
try {
json.put("data", tagCountMap);
} catch (Exception e) {
logger.info("Error in serializing fetched tag count data");
e.printStackTrace();
}
}

JSONObject json = new GetTagDataStatisticsService().getTagCountSumFromTime(crisisCode, attributeCode, granularity, startTime);
return Response.ok(json.toJSONString()).build();
}

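The one-line endpoint bodies in this file delegate to a new GetTagDataStatisticsService, whose source is not part of this diff. A minimal sketch of what its getTagCountSumFromTime method might look like, assuming it simply carries over the per-label aggregation removed from the endpoint above (the json-simple JSONObject and the way the service obtains the facade are assumptions):

    package qa.qcri.aidr.analysis.service;

    import java.util.Date;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    import org.json.simple.JSONObject; // assumed: consistent with json.toJSONString() in the resource class

    import qa.qcri.aidr.analysis.entity.TagData;
    import qa.qcri.aidr.analysis.facade.TagDataStatisticsResourceFacade;
    import qa.qcri.aidr.analysis.utils.JsonResponse;

    public class GetTagDataStatisticsService {

        // The resource instantiates this service with "new", so plain @EJB injection
        // would not apply; how the real class obtains the facade is not shown in the
        // diff, and a pre-initialized field is assumed here.
        private TagDataStatisticsResourceFacade tagDataEJB;

        @SuppressWarnings("unchecked")
        public JSONObject getTagCountSumFromTime(String crisisCode, String attributeCode,
                                                 Long granularity, Long startTime) {
            // Same query the removed inline code issued from the REST endpoint
            List<TagData> tagDataList = tagDataEJB.getDataAfterTimestampGranularity(
                    crisisCode, attributeCode, null, startTime, granularity);

            JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
            json.put("startTime", new Date(startTime));

            if (tagDataList != null) {
                // Sum the counts per label code, as the removed loop did
                Map<String, Integer> tagCountMap = new TreeMap<String, Integer>();
                for (TagData t : tagDataList) {
                    Integer soFar = tagCountMap.get(t.getLabelCode());
                    tagCountMap.put(t.getLabelCode(), soFar == null ? t.getCount() : soFar + t.getCount());
                }
                json.put("data", tagCountMap);
            }
            return json;
        }
    }

The getTagCountSumInInterval endpoint further down delegates in the same way; its service method would differ only by the extra endTime parameter and a call to getDataInIntervalWithGranularity, as the code removed there shows.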
@@ -108,29 +68,7 @@ public Response getTagCountInTimeWindow(@PathParam("crisisCode") String crisisCo
@PathParam("granularity") Long granularity,
@DefaultValue("0") @QueryParam("startTime") Long startTime) {

//long timeGranularity = DateFormatConfig.parseTime(granularity);
// First get the list of data points from DB
List<TagData> tagDataList = tagDataEJB.getDataByGranularityInTimeWindow(crisisCode, attributeCode, null, startTime, granularity);

// Now the real work - count and send response
TimeWindowTagCountDTO dto = new TimeWindowTagCountDTO();
JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
json.put("timestamp", new Date(startTime));

if (tagDataList != null) {
try {
List<TagCountDTO> dtoList = new ArrayList<TagCountDTO>();
for (TagData t: tagDataList) {
dtoList.add(TagCountDTOHelper.convertTagDataToDTO(t));
System.out.println("tag: " + t.getLabelCode() + ", count: " + t.getCount());
}
dto = TimeWindowTagCountDTOHelper.convertTagCountDTOListToDTO(startTime, dtoList);
json.put("data", dto);
} catch (Exception e) {
logger.info("Error in serializing fetched tag count data");
e.printStackTrace();
}
}
JSONObject json = new GetTagDataStatisticsService().getTagCountInTimeWindow(crisisCode, attributeCode, granularity, startTime);
return Response.ok(json.toJSONString()).build();
}

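Continuing the same hypothetical sketch, the time-window method would convert each TagData row to a TagCountDTO and wrap the list in a single TimeWindowTagCountDTO, mirroring the block removed above (imports for ArrayList and the DTO/helper classes from the removed import list are assumed):

        // Added to the hypothetical GetTagDataStatisticsService sketched earlier.
        @SuppressWarnings("unchecked")
        public JSONObject getTagCountInTimeWindow(String crisisCode, String attributeCode,
                                                  Long granularity, Long startTime) {
            List<TagData> tagDataList = tagDataEJB.getDataByGranularityInTimeWindow(
                    crisisCode, attributeCode, null, startTime, granularity);

            JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
            json.put("timestamp", new Date(startTime));

            if (tagDataList != null) {
                // Convert each row, then describe the whole window with one DTO
                List<TagCountDTO> dtoList = new ArrayList<TagCountDTO>();
                for (TagData t : tagDataList) {
                    dtoList.add(TagCountDTOHelper.convertTagDataToDTO(t));
                }
                TimeWindowTagCountDTO dto = TimeWindowTagCountDTOHelper.convertTagCountDTOListToDTO(startTime, dtoList);
                json.put("data", dto);
            }
            return json;
        }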
@@ -155,50 +93,8 @@ public Response getTagCountTimeSeries(@PathParam("crisisCode") String crisisCode
if (null == endTime || endTime < startTime) {
endTime = System.currentTimeMillis();
}
//long timeGranularity = DateFormatConfig.parseTime(granularity);
// First get the list of data points from DB
List<TagData> tagDataList = tagDataEJB.getDataInIntervalWithGranularity(crisisCode, attributeCode, null, startTime, endTime, granularity);

// Now the real work - creat time series, format and send response
JSONObject json = new JSONObject();
if (tagDataList != null) {
try {
// Create time series Map data first
Map<Long, List<TagCountDTO>> tagCountMap = new TreeMap<Long, List<TagCountDTO>>();
for (TagData t: tagDataList) {
if (tagCountMap.containsKey(t.getTimestamp())) {
List<TagCountDTO> tagsList = tagCountMap.get(t.getTimestamp());
if (null == tagsList || tagsList.isEmpty()) {
tagsList = new ArrayList<TagCountDTO>();
tagCountMap.put(t.getTimestamp(), tagsList);
}
tagsList.add(TagCountDTOHelper.convertTagDataToDTO(t));
} else {
tagCountMap.put(t.getTimestamp(), new ArrayList<TagCountDTO>());
List<TagCountDTO> tagsList = tagCountMap.get(t.getTimestamp());
tagsList.add(TagCountDTOHelper.convertTagDataToDTO(t));
}
}
//System.out.println("Finished creating Map of timestamp versus TagCountDTO list");
// Now convert the above time series data Map to DTO object for response
List<TimeWindowTagCountDTO> timeWindowDTOList = new ArrayList<TimeWindowTagCountDTO>();
for (Long key: tagCountMap.keySet()) {
TimeWindowTagCountDTO timeWindowDTO = TimeWindowTagCountDTOHelper.convertTagCountDTOListToDTO(key, tagCountMap.get(key));
timeWindowDTOList.add(timeWindowDTO);
}
//System.out.println("Finished creating TimeWindowTagCountDTO list");
TagCountSeriesDTO dto = TagCountSeriesDTOHelper.convertTimeWindowTagCountDTOListToDTO(crisisCode, attributeCode, granularity, timeWindowDTOList);
//System.out.println("Finished creating TagCountSeriesDTO");
ObjectMapper mapper = FasterXmlWrapper.getObjectMapper();
//json.put("time_series", dto);
//System.out.println("Finished creating json object: " + json);
return Response.ok(mapper.writeValueAsString(dto)).build();
} catch (Exception e) {
logger.info("Error in serializing fetched tag count data");
e.printStackTrace();
}
}
return Response.ok(JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity).toJSONString()).build();
JSONObject json = new GetTagDataStatisticsService().getTagCountTimeSeries(crisisCode, attributeCode, granularity, startTime, endTime);
return Response.ok(json.toJSONString()).build();
}

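The time-series endpoint carried the most inline logic. A sketch of the corresponding service method, again assuming it keeps the removed grouping logic; how the real service serializes the TagCountSeriesDTO into the returned JSONObject is not visible in the diff, so the "data" key here is an assumption:

        // Added to the same hypothetical service sketch.
        @SuppressWarnings("unchecked")
        public JSONObject getTagCountTimeSeries(String crisisCode, String attributeCode,
                                                Long granularity, Long startTime, Long endTime) {
            List<TagData> tagDataList = tagDataEJB.getDataInIntervalWithGranularity(
                    crisisCode, attributeCode, null, startTime, endTime, granularity);

            JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
            if (tagDataList != null) {
                // Group the rows by timestamp, as the removed inline loop did
                Map<Long, List<TagCountDTO>> tagCountMap = new TreeMap<Long, List<TagCountDTO>>();
                for (TagData t : tagDataList) {
                    List<TagCountDTO> tagsList = tagCountMap.get(t.getTimestamp());
                    if (tagsList == null) {
                        tagsList = new ArrayList<TagCountDTO>();
                        tagCountMap.put(t.getTimestamp(), tagsList);
                    }
                    tagsList.add(TagCountDTOHelper.convertTagDataToDTO(t));
                }
                // One TimeWindowTagCountDTO per timestamp, then a single series DTO
                List<TimeWindowTagCountDTO> timeWindowDTOList = new ArrayList<TimeWindowTagCountDTO>();
                for (Map.Entry<Long, List<TagCountDTO>> e : tagCountMap.entrySet()) {
                    timeWindowDTOList.add(TimeWindowTagCountDTOHelper.convertTagCountDTOListToDTO(e.getKey(), e.getValue()));
                }
                TagCountSeriesDTO dto = TagCountSeriesDTOHelper.convertTimeWindowTagCountDTOListToDTO(
                        crisisCode, attributeCode, granularity, timeWindowDTOList);
                json.put("data", dto); // key name assumed
            }
            return json;
        }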
/**
@@ -219,34 +115,7 @@ public Response getTagCountSumInInterval(@PathParam("crisisCode") String crisisC
@DefaultValue("0") @QueryParam("startTime") Long startTime,
@QueryParam("endTime") Long endTime) {

//long timeGranularity = DateFormatConfig.parseTime(granularity);
// First get the list of data points from DB
List<TagData> tagDataList = tagDataEJB.getDataInIntervalWithGranularity(crisisCode, attributeCode, null, startTime, endTime, granularity);

// Now the real work - count and send response
JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
json.put("startTime", new Date(startTime));
json.put("endTime", new Date(endTime));

if (tagDataList != null) {
Map<String, Integer> tagCountMap = new TreeMap<String, Integer>();
for (TagData t: tagDataList) {
System.out.println("Looking at tag: " + t.getLabelCode() + ", having count = " + t.getCount());
if (tagCountMap.containsKey(t.getLabelCode())) {
tagCountMap.put(t.getLabelCode(), tagCountMap.get(t.getLabelCode()) + t.getCount());
System.out.println("Update Map for OLD tag = " + t.getLabelCode() + " with count = " + tagCountMap.get(t.getLabelCode()));
} else {
tagCountMap.put(t.getLabelCode(), t.getCount());
System.out.println("Update Map with NEW tag = " + t.getLabelCode() + " with count = " + tagCountMap.get(t.getLabelCode()));
}
}
try {
json.put("data", tagCountMap);
} catch (Exception e) {
logger.info("Error in serializing fetched tag count data");
e.printStackTrace();
}
}
JSONObject json = new GetTagDataStatisticsService().getTagCountSumInInterval(crisisCode, attributeCode, granularity, startTime, endTime);
return Response.ok(json.toJSONString()).build();
}

@@ -268,24 +137,7 @@ public Response getSingleItem(@PathParam("crisisCode") String crisisCode,
@PathParam("labelCode") String labelCode,
@PathParam("granularity") Long granularity,
@PathParam("startTime") Long startTime) {

//long timeGranularity = DateFormatConfig.parseTime(granularity);
TagDataPK tagDataPK = new TagDataPK();
tagDataPK.setCrisisCode(crisisCode);
tagDataPK.setTimestamp(startTime);
tagDataPK.setGranularity(granularity);
tagDataPK.setAttributeCode(attributeCode);
tagDataPK.setLabelCode(labelCode);

TagData obj = tagDataEJB.getSingleDataByPK(tagDataPK);
JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
try {
json.put("timestamp", new Date(startTime));
json.put("data", obj);
} catch (Exception e) {
logger.error("Serialization error");
logger.error("Exception:" + e);
}
JSONObject json = new GetTagDataStatisticsService().getSingleItem(crisisCode, attributeCode, labelCode, granularity, startTime);
return Response.ok(json.toJSONString()).build();
}

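Finally, the single-item lookup only builds a composite primary key and fetches one row; a sketch of the assumed service method, following the code removed above:

        // Added to the same hypothetical service sketch.
        @SuppressWarnings("unchecked")
        public JSONObject getSingleItem(String crisisCode, String attributeCode, String labelCode,
                                        Long granularity, Long startTime) {
            // Build the composite primary key, exactly as the removed inline code did
            TagDataPK tagDataPK = new TagDataPK();
            tagDataPK.setCrisisCode(crisisCode);
            tagDataPK.setTimestamp(startTime);
            tagDataPK.setGranularity(granularity);
            tagDataPK.setAttributeCode(attributeCode);
            tagDataPK.setLabelCode(labelCode);

            TagData obj = tagDataEJB.getSingleDataByPK(tagDataPK);
            JSONObject json = JsonResponse.getNewJsonResponseObject(crisisCode, attributeCode, granularity);
            json.put("timestamp", new Date(startTime));
            json.put("data", obj);
            return json;
        }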