Commit 09ef1a62 authored by Adam Rousell

Merge branch 'dev' into 'master'

Dev

See merge request !1
parents 534f22a7 196e6d00
/node_modules/*
var stats = {};
var name = 'Age';
var description = 'Information regarding the age of the features based on date/time fields.';

exports.analyse = function(srcData) {
    var data = JSON.parse(JSON.stringify(srcData));
    // We need to look through the attributes to find which ones are date/time types;
    // we look at the metadata and attribute information for this.
    var dates = [];
    for (var i = 0; i < data.meta.length; i++) {
        var f = data.meta[i];
        if (f.type === 'stringdate' || f.type === 'epochdate') {
            dates.push(f);
        }
    }
    for (var i = 0; i < data.properties.length; i++) {
        var f = data.properties[i];
        if (f.type === 'stringdate' || f.type === 'epochdate') {
            dates.push(f);
        }
    }
    // Now we need to go through and get the values for each of the date fields from the data itself.
    var dateData = [];
    for (var i = 0; i < dates.length; i++) {
        var d = dates[i];
        var values = [];
        for (var j = 0; j < data.data.length; j++) {
            var f = data.data[j];
            var val = f[d.name];
            if (val) {
                if (d.type === 'stringdate') {
                    // String dates are converted to epoch milliseconds so all values are comparable.
                    values.push(Date.parse(val));
                } else {
                    values.push(val);
                }
            }
        }
        dateData.push({
            field: d.name,
            values: values
        });
    }
    // Now we need to do the analysis on them.
    var statsData = {
        name: name,
        description: description,
        fields: []
    };
    for (var i = 0; i < dateData.length; i++) {
        var d = dateData[i];
        // Loop through all the values to find the newest, oldest and average timestamps.
        // min starts from 'now' so any earlier timestamp replaces it.
        var min = Date.now();
        var max = 0;
        var sum = 0;
        var total = d.values.length;
        for (var j = 0; j < total; j++) {
            var v = d.values[j];
            if (v < min) {
                min = v;
            }
            if (v > max) {
                max = v;
            }
            sum = sum + v;
        }
        var ave = sum / total;
        // Field names follow the prefix_name_type pattern (e.g. 'attr_id_integer');
        // keep only the middle part as the display name.
        var firstSplit = d.field.indexOf('_');
        var lastSplit = d.field.lastIndexOf('_');
        var fieldName = d.field.substring(firstSplit + 1, lastSplit);
        statsData.fields.push({
            field: fieldName,
            metrics: [
                {
                    name: 'Newest',
                    value: new Date(max)
                },
                {
                    name: 'Oldest',
                    value: new Date(min)
                },
                {
                    name: 'Average',
                    value: new Date(ave)
                }
            ]
        });
    }
    return statsData;
};
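// Example (illustration only, not part of the module): a minimal sketch of the input shape
// analyse() appears to expect — 'meta' and 'properties' listing fields with their types, and
// 'data' holding one record per feature. The field names and values here are hypothetical.
/*
var age = require('./age');

var srcData = {
    meta: [
        { name: 'meta_created_epochdate', type: 'epochdate' }
    ],
    properties: [
        { name: 'attr_updated_stringdate', type: 'stringdate' }
    ],
    data: [
        { meta_created_epochdate: 1483228800000, attr_updated_stringdate: '2017-01-15' },
        { meta_created_epochdate: 1484000000000, attr_updated_stringdate: '2017-02-01' }
    ]
};

// Returns { name: 'Age', description: ..., fields: [ { field, metrics: [Newest, Oldest, Average] } ] }
var result = age.analyse(srcData);
*/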
var attr_count = require('./attr_count');
var attr_data = require('./attr_data');
var age = require('./age');

exports.analyse = function(tablename, data) {
    // We want to return a JSON array containing the values obtained from each analysis module.
    var results = [];
    results.push(attr_count.analyse(data));
    results.push(age.analyse(data));
    results.push(attr_data.analyse(data));
    return results;
};
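// Example (illustration only): the aggregator collects the three analysers' results into one
// array, so callers receive [attribute-count result, age result, tags result]. The 'tablename'
// argument is currently unused. The module path and table name below are hypothetical.
/*
var analyser = require('./index');
var results = analyser.analyse('table_1500000000000', srcData);
// results[0].name => 'Attribute count', results[1].name => 'Age', results[2].name => 'Tags'
*/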
var db = require('../db');

var stats = {};
var name = 'Attribute count';
var description = 'Information regarding the number of attributes that are associated to features.';

exports.analyse = function(srcData) {
    // We need to go through the data and calculate how many attributes each record has.
    // First create a copy of the data.
    var data = JSON.parse(JSON.stringify(srcData));
    // Now count how many attributes are present on each item.
    var a = [];
    for (var i = 0; i < data.data.length; i++) {
        var f = data.data[i];
        var count = 0;
        for (var k in f) {
            if (f.hasOwnProperty(k)) {
                ++count;
            }
        }
        a.push(count);
    }
    var min = Number.POSITIVE_INFINITY;
    var max = 0;
    var sum = 0;
    for (var i = 0; i < a.length; i++) {
        if (a[i] > max)
            max = a[i];
        if (a[i] < min)
            min = a[i];
        sum = sum + a[i];
    }
    var ave = sum / a.length;
    stats = {
        min: min,
        max: max,
        average: ave
    };
    // Now create some information for showing on a chart.
    // First calculate how many groups we should have.
    var span = max - min;
    // If the span is large, then we need to use groups.
    var grpSize = 1;
    var spans = span;
    if (span > 10) {
        // Split the span into five equally sized groups.
        grpSize = span / 5;
        spans = 5;
    }
    // The first group catches values equal to the minimum; the rest are (minval, maxval] ranges.
    var groups = [
        {
            maxval: min,
            count: 0
        }
    ];
    for (var i = 0; i < spans; i++) {
        groups.push({
            minval: min + (i * grpSize),
            maxval: min + ((i + 1) * grpSize),
            count: 0
        });
    }
    // Now go through each count value and store it in the correct group.
    for (var i = 0; i < a.length; i++) {
        var c = a[i];
        for (var j = 0; j < groups.length; j++) {
            var g = groups[j];
            if (c <= g.maxval && (c > g.minval || g.minval === undefined)) {
                g.count = g.count + 1;
                break;
            }
        }
    }
    var labels = [];
    var values = [];
    for (var i = 0; i < groups.length; i++) {
        var g = groups[i];
        labels.push(g.maxval);
        values.push(g.count);
    }
    return {
        name: name,
        description: description,
        fields: [
            {
                metrics: [
                    {
                        name: 'Maximum',
                        value: stats.max
                    },
                    {
                        name: 'Minimum',
                        value: stats.min
                    },
                    {
                        name: 'Average',
                        value: stats.average
                    }
                ],
                data: {
                    labels: labels,
                    values: values
                }
            }
        ]
    };
};
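// Example (illustration only): with two hypothetical features, one carrying three attributes
// and one carrying a single attribute, the metrics should come out as Maximum 3, Minimum 1,
// Average 2, and the chart buckets are labelled by each group's upper bound.
/*
var attr_count = require('./attr_count');
var result = attr_count.analyse({
    meta: [],
    properties: [],
    data: [
        { id: 1, name: 'a', type: 'x' },   // 3 attributes
        { id: 2 }                          // 1 attribute
    ]
});
// result.fields[0].metrics => Maximum 3, Minimum 1, Average 2
*/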
var stats = {};
var name = 'Tags';
var description = 'Information about each attribute regarding the values that have been entered.';

// Attribute names follow the prefix_name_type pattern (e.g. 'attr_id_integer');
// extract the middle part as the display name.
function getAttributeName(a) {
    var firstSplit = a.indexOf('_');
    var lastSplit = a.lastIndexOf('_');
    var fieldName = a.substring(firstSplit + 1, lastSplit);
    return fieldName;
}

// Turn an array of attribute values into a readable string, truncating long lists.
function valueArrayToString(attrValues) {
    var str = '';
    var len = attrValues.length;
    if (len > 10) {
        str = attrValues[0] + ', ' + attrValues[1] + ' and ' + (len - 2) + ' others.';
    } else {
        for (var i = 0; i < len; i++) {
            str = str + attrValues[i];
            if (i !== (len - 1)) {
                str = str + ', ';
            }
        }
    }
    return str;
}

exports.analyse = function(srcData) {
    // Go through each attribute present and look at what values have been entered.
    var data = JSON.parse(JSON.stringify(srcData));
    // Get the list of attributes from the meta and properties.
    var attributes = data.meta.concat(data.properties);
    // For each attribute we need to iterate over every object and record the values.
    var featureData = data.data;
    var featureCount = featureData.length;
    var attrInfo = {};
    // Loop over all the attributes that are available and create a holder record for each.
    for (var i = 0; i < attributes.length; i++) {
        attrInfo[attributes[i].name] = {
            type: attributes[i].type,
            values: {}
        };
    }
    // Now loop over every feature and store the values against the attributes.
    // If the attribute value already exists, add one to its count, otherwise create a new entry.
    for (var i = 0; i < featureCount; i++) {
        var f = featureData[i];
        for (var a in f) {
            if (f.hasOwnProperty(a)) {
                var fa = f[a];
                var da = attrInfo[a];
                if (da) {
                    // Look for the value of the feature in the values for the attribute.
                    var v = da.values[fa];
                    if (v) {
                        // Already exists, so increase the count by 1.
                        da.values[fa] = v + 1;
                    } else {
                        // Not seen yet, so create it.
                        da.values[fa] = 1;
                    }
                }
            }
        }
    }
    // The contents of attrInfo now tell us the count of each value for each attribute.
    var ret = {
        name: name,
        description: description,
        fields: []
    };
    // Loop through each attribute and report the most/least frequent values, along with data
    // for the chart, all returned in one large array.
    for (var a in attrInfo) {
        if (attrInfo.hasOwnProperty(a)) {
            var attr = attrInfo[a];
            var labels = [];
            var counts = [];
            var mostFrequent = 0;
            var leastFrequent = Number.POSITIVE_INFINITY;
            var mostFrequentValues = [];
            var leastFrequentValues = [];
            // Loop through each recorded value for this attribute.
            var attrValues = attr.values;
            for (var v in attrValues) {
                if (attrValues.hasOwnProperty(v)) {
                    var countValue = attrValues[v];
                    // First store the information for the chart.
                    labels.push(v);
                    counts.push(countValue);
                    // Check the count and update the most/least frequent information.
                    if (countValue < leastFrequent) {
                        leastFrequent = countValue;
                        leastFrequentValues = [v];
                    } else if (countValue === leastFrequent) {
                        leastFrequentValues.push(v);
                    }
                    if (countValue > mostFrequent) {
                        mostFrequent = countValue;
                        mostFrequentValues = [v];
                    } else if (countValue === mostFrequent) {
                        mostFrequentValues.push(v);
                    }
                }
            }
            // Features with no value for this attribute make up the remainder.
            labels.push('No value');
            counts.push(featureCount - counts.reduce(function(a, b) {
                return a + b;
            }, 0));
            var metrics = [];
            if (leastFrequent === mostFrequent) {
                metrics.push({
                    name: 'Only value(s)',
                    value: valueArrayToString(mostFrequentValues) + ' (' + mostFrequent + ')'
                });
            } else {
                metrics = [
                    {
                        name: 'Most frequent value',
                        value: valueArrayToString(mostFrequentValues) + ' (' + mostFrequent + ')'
                    },
                    {
                        name: 'Least frequent value',
                        value: valueArrayToString(leastFrequentValues) + ' (' + leastFrequent + ')'
                    }
                ];
            }
            ret.fields.push({
                field: getAttributeName(a),
                metrics: metrics,
                data: {
                    labels: labels,
                    values: counts
                }
            });
        }
    }
    return ret;
};
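// Example (illustration only): value counts per attribute drive the most/least frequent metrics
// and the chart data, with a trailing 'No value' bucket for features lacking the attribute.
// The attribute name 'attr_surface_string' is hypothetical but follows the prefix_name_type pattern.
/*
var attr_data = require('./attr_data');
var result = attr_data.analyse({
    meta: [],
    properties: [ { name: 'attr_surface_string', type: 'string' } ],
    data: [
        { attr_surface_string: 'asphalt' },
        { attr_surface_string: 'asphalt' },
        { attr_surface_string: 'gravel' },
        { }
    ]
});
// result.fields[0] => field 'surface', most frequent 'asphalt (2)', least frequent 'gravel (1)',
// chart labels ['asphalt', 'gravel', 'No value'] with counts [2, 1, 1]
*/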
var express = require('express');
var path = require('path');
var multer = require('multer');
var logger = require('morgan');
var bodyParser = require('body-parser');

var index = require('./routes/index');
var analyse = require('./routes/analyse');
var error = require('./routes/error');

var upload = multer();
var app = express();

app.set('view engine', 'pug');
app.set('views', path.resolve(__dirname, 'views'));

app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use('/public', express.static(path.join(__dirname, 'public')));

app.use('/', index);
app.use('/analyse', analyse);

// Catch-all error handler, registered after the routes so it receives their errors.
app.use(function(err, req, res, next) {
    res.status(500);
    res.render('error', {
        message: 'There was an error',
        error: {}
    });
});

//app.use('/error', error);
/*app.get('/', function(req, res) {
    res.render('index');
});
app.post('/analyse', upload.single('jsonfile'), function(req, res, next) {
    // Read the form submitted
    //res.render('analyse', {filename: name});
})*/

app.listen(3000);
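// Usage note (illustration only): with the dependencies installed and the './routes' and
// './views' directories in place, starting this file with node brings the server up on
// http://localhost:3000; '/' is handled by ./routes/index and '/analyse' by ./routes/analyse,
// with the error handler above rendering the 'error' view for anything the routes pass on.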
var connection;

function connect() {
    var mysql = require('mysql');
    connection = mysql.createConnection({
        host: 'localhost',
        user: 'wegovnow',
        password: '23t9fh92!78Iw8of34',
        database: 'wegovnow'
    });
    connection.connect();
}

exports.connect = function () {
    connect();
};

function disconnect() {
    connection.end();
}

exports.disconnect = function () {
    disconnect();
};

exports.createTable = function (cols) {
    // Create the query; the table name is based on the current timestamp.
    var name = 'table_' + (new Date).getTime();
    var query = "CREATE TABLE " + name + " (";
    // TODO: a primary key column still needs to be added.
    for (var c in cols) {
        if (cols.hasOwnProperty(c)) {
            // cols[c] is the column descriptor
            var col = cols[c];
            query = query + " " + connection.escapeId(col.name);
            // Map the analyser's field types onto MySQL column types.
            var t = "";
            switch (col.type) {
                case "string":
                    t = "VARCHAR(256)";
                    break;
                case "integer":
                    t = "INTEGER";
                    break;
                case "decimal":
                    t = "FLOAT";
                    break;
                case "epochdate":
                    t = "INTEGER";
                    break;
                case "stringdate":
                    t = "VARCHAR(64)";
                    break;
                case "boolean":
                    t = "BOOLEAN";
                    break;
                default:
                    t = "VARCHAR(256)";
                    break;
            }
            query = query + " " + t;
            // Add a comma between column definitions.
            query = query + ",";
        }
    }
    // If columns have been added, there will be a trailing ',' which needs removing.
    query = query.slice(0, -1);
    query = query + ");";
    connection.query(query, function(err, rows, fields) {
        if (err)
            console.log("ERROR: " + err);
    });
    return name;
};
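// Example (illustration only): createTable expects a collection of column descriptors with
// 'name' and 'type' properties. The names here are hypothetical but reuse the prefix_name_type
// convention from the addData comment below; the returned table name is 'table_' plus a timestamp.
/*
var db = require('../db');   // path as used by the analysers
db.connect();
var table = db.createTable([
    { name: 'meta_type_string', type: 'string' },
    { name: 'attr_id_integer', type: 'integer' },
    { name: 'meta_created_epochdate', type: 'epochdate' }
]);
// table => e.g. 'table_1500000000000'
*/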
exports.addData = function (table, row) {
    // row will be a GeoJSON-derived object in the format:
    /*
    {
        "type": {
            "data": "xxx",
            "db_name": "meta_type_string"
        },
        "properties": {
            "id": {
                "data": 12345,
                "db_name": "attr_id_integer"
            }
        }
    }
    */
    var dbColumns = [];
    // First add the meta items (everything except geometry and properties).
    for (var key in row) {
        if (row.hasOwnProperty(key)) {
            if (key !== "geometry" && key !== "properties") {
                var obj = row[key];
                dbColumns.push(new dbColumn(obj.db_name, obj.data));
            }
        }
    }
    // Now do the properties.
    if (row.properties) {
        for (var p in row.properties) {
            if (row.properties.hasOwnProperty(p)) {
                var obj = row.properties[p];
                dbColumns.push(new dbColumn(obj.db_name, obj.data));
            }
        }
    }
    // Now use the column array to add the data to the db table.
    var sql = "INSERT INTO " + table + " (";
    var colIds = "";
    var colVals = "";
    for (var c in dbColumns) {
        if (dbColumns.hasOwnProperty(c)) {
            var col = dbColumns[c];
            colIds = colIds + " " + connection.escapeId(col.name) + ",";
            // connection.escape() already quotes string values, so no extra quoting is needed.
            colVals = colVals + " " + connection.escape(col.data) + ",";
        }
    }
    colIds = colIds.slice(0, -1);
    colVals = colVals.slice(0, -1);
    sql = sql + colIds + ") VALUES (" + colVals + ");";
    connection.query(sql, function(err, rows, fields) {
        if (err)
            console.log("ERROR: " + err);
    });
};

function dbColumn(name, data) {
    this.name = name;
    this.data = data;
}
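// Example (illustration only, continuing the createTable sketch above): addData takes the table
// name returned by createTable and a row object in the format described above — top-level keys
// other than 'geometry' and 'properties' become meta columns, entries under 'properties' become
// attribute columns. The values shown are hypothetical.
/*
db.addData(table, {
    type: { data: 'Feature', db_name: 'meta_type_string' },
    properties: {
        id: { data: 12345, db_name: 'attr_id_integer' }
    }
});
db.disconnect();
*/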