WeekOverview analysis working

parent 713d972953
commit b6e486158d
3  .gitignore  (vendored)
@@ -1,3 +1,4 @@
 *.iml
 .idea
 .project
+target
@@ -1,116 +0,0 @@
package de.hsel.spm.baudas;

/* Example program for using Weka in Eclipse.

   The raw data is in CSV format.

   The raw data contains the following 26 attributes:
   0..9   customer data and shopping behaviour
   10     purchase total
   11..25 purchased goods

   These are only a few examples of how the analyses can be used.
   Look up further settings yourself if needed!
*/

import java.io.*;

import weka.associations.Apriori;
import weka.clusterers.SimpleKMeans;
import weka.core.Instances;
import weka.core.converters.CSVLoader;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NumericCleaner;
import weka.filters.unsupervised.attribute.NumericToNominal;

public class WekaBeispiel {

    void findCluster(Instances daten, int anz) throws Exception {
        SimpleKMeans model = new SimpleKMeans();
        model.setNumClusters(anz);

        model.buildClusterer(daten);
        System.out.println(model);
    }

    void makeApriori(Instances daten) throws Exception {
        // convert to bought / not bought (0/1)
        NumericCleaner nc = new NumericCleaner();
        nc.setMaxThreshold(1.0); // set the threshold to 1
        nc.setMaxDefault(1.0);   // replace everything above the threshold with 1
        nc.setInputFormat(daten);
        daten = Filter.useFilter(daten, nc); // apply the filter

        // treat the data as nominal rather than numeric
        NumericToNominal num2nom = new NumericToNominal();
        num2nom.setAttributeIndices("first-last");
        num2nom.setInputFormat(daten);
        daten = Filter.useFilter(daten, num2nom);

        Apriori model = new Apriori();
        model.buildAssociations(daten);
        System.out.println(model);
    }

    public static void main(String[] args) throws Exception {
        // Enter your own file path, don't use mine ;-)
        String path = "/home/julian/IntellijProjects/Softwareprojektmanagement/src/test/resources/";
        String roh = path + "kd100.csv";

        Instances alleDaten, nurWaren, nurKunden;

        WekaBeispiel dt = new WekaBeispiel();

        // load the CSV file
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(roh));
        alleDaten = loader.getDataSet();

        // replace 0 with ? so that only the goods that were actually
        // bought are taken into account in the analysis
        NumericCleaner nc = new NumericCleaner();
        nc.setMinThreshold(1.0);      // set the threshold to 1
        nc.setMinDefault(Double.NaN); // replace everything below the threshold with ?
        nc.setInputFormat(alleDaten);
        alleDaten = Filter.useFilter(alleDaten, nc); // apply the filter; important: reassign (alleDaten = ...)

        // cluster analysis with 5 clusters over all data
        dt.findCluster(alleDaten, 5);

        // association analysis over all data - usefulness questionable ;)
        dt.makeApriori(alleDaten);

        // drop the customer data, keep only the shopping baskets
        nurWaren = new Instances(alleDaten);
        for (int i = 0; i < 11; i++) {
            nurWaren.deleteAttributeAt(0); // remove a single attribute
        }
        // association analysis of the purchased goods
        dt.makeApriori(nurWaren);

        // drop the goods, keep only the customer data
        nurKunden = new Instances(alleDaten);
        for (int i = 0; i < 16; i++) {
            nurKunden.deleteAttributeAt(10); // remove a single attribute
        }

        // cluster analysis with 3 clusters over the customer data
        dt.findCluster(nurKunden, 3);
    }
}
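The deleted example strips columns by calling deleteAttributeAt in a loop. Elsewhere in this commit the same job is done with Weka's Remove filter; the following standalone sketch shows that approach. The file name kd100.csv and the kept indices 5 and 10 come from code in this commit, everything else is illustrative.

import java.io.File;

import weka.core.Instances;
import weka.core.converters.CSVLoader;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Remove;

public class RemoveFilterSketch {
    public static void main(String[] args) throws Exception {
        // load the raw CSV data (adjust the path to your checkout)
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File("kd100.csv"));
        Instances data = loader.getDataSet();

        // keep only the weekday (5) and purchase-total (10) attributes;
        // invertSelection turns "remove these" into "keep only these"
        Remove remove = new Remove();
        remove.setAttributeIndicesArray(new int[]{5, 10});
        remove.setInvertSelection(true);
        remove.setInputFormat(data);
        data = Filter.useFilter(data, remove);

        System.out.println(data.numAttributes() + " attributes remain");
    }
}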
@@ -8,7 +8,7 @@ import java.io.File;
 import java.io.IOException;
 
 /**
- * base interface for analysis
+ * base interface for analysis.
  *
  * @author Johannes Theiner
  * @version 0.1
@@ -19,7 +19,7 @@ public interface Analysis<T> {
 
     /**
-     * loads data from csv file
+     * loads data from csv file.
      *
      * @param file File to analyze
     * @return loaded data in weka format
@@ -37,7 +37,7 @@ public interface Analysis<T> {
     }
 
     /**
-     * result of the analysis
+     * result of the analysis.
      *
      * @return result as T
     */
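The hunks above only touch the Javadoc, so the surrounding interface is not fully visible here. Below is a minimal sketch of the shape the rest of the commit implies: a load(File) step backed by Weka's CSVLoader and a typed getResult(). This is a reconstruction for orientation, not the committed file, and the real interface may declare more members.

package de.hsel.spm.baudas.analysis;

import java.io.File;
import java.io.IOException;

import weka.core.Instances;
import weka.core.converters.CSVLoader;

// hypothetical reconstruction of the Analysis<T> interface used by WeekOverview
public interface Analysis<T> {

    /**
     * loads data from csv file.
     *
     * @param file File to analyze
     * @return loaded data in weka format
     */
    default Instances load(File file) {
        try {
            CSVLoader loader = new CSVLoader();
            loader.setSource(file);
            return loader.getDataSet();
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * result of the analysis.
     *
     * @return result as T
     */
    T getResult();
}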
@@ -1,73 +1,63 @@
 package de.hsel.spm.baudas.analysis;
 
 import org.jetbrains.annotations.Nullable;
 import weka.core.Instance;
 import weka.core.Instances;
 import weka.filters.Filter;
 import weka.filters.unsupervised.attribute.Remove;
 
 import java.io.File;
 import java.util.AbstractMap;
 import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Week Overview Analysis
+ * Week Overview Analysis.
  *
  * @author Julian Hinxlage
  * @version 0.1
  * @since 0.1
  **/
-public class WeekOverview implements Analysis<Map<String, Integer>> {
+public class WeekOverview implements Analysis<Map<String, Map.Entry<Double, Integer>>> {
 
     private Instances instances;
-    private Map<String, Integer> result;
+    private Map<String, Map.Entry<Double, Integer>> result;
 
     public WeekOverview(File file) {
-        result = new HashMap<String, Integer>();
+        result = new HashMap<>();
         instances = load(file);
     }
 
     @Override
-    public Map<String, Integer> getResult() {
+    public Map<String, Map.Entry<Double, Integer>> getResult() {
 
-        int[] keepIndexes = new int[]{5, 10};
-        int dayIndex = 0;
-        int amountIndex = 1;
-
-        try {
-            //Remove all other attributes
-            Remove remove = new Remove();
-            remove.setAttributeIndicesArray(keepIndexes);
-            remove.setInvertSelection(true);
-            remove.setInputFormat(instances);
-            instances = Filter.useFilter(instances, remove);
-        } catch (Exception ex) {
-            ex.printStackTrace();
-        }
-
-        for (int i = 0; i < instances.attribute(dayIndex).numValues(); i++) {
-            String day = instances.attribute(dayIndex).value(i);
-
-            System.out.println(day);
-
-            for (int j = 0; j < instances.attribute(amountIndex).numValues(); j++) {
-                String amount = instances.attribute(amountIndex).value(j);
-
-                System.out.println(amount);
-
-                int x = Integer.parseInt(amount);
-
-                if(!result.containsKey(day)){
-                    result.put(day,0);
-                }
-                result.put(day,result.get(day) + x);
-            }
-        }
+        int dayIndex = 5;
+        int amountIndex = 10;
+
+        int startArticles = 11;
+        int endArticles = 25;
+
+        for (int i = 0; i < instances.numInstances(); i++) {
+            Instance instance = instances.get(i);
+            double amount = instance.value(amountIndex);
+            String day = instance.stringValue(dayIndex);
+
+            int count = 0;
+            for (int j = startArticles; j < endArticles; j++) {
+                count += (int) instance.value(j);
+            }
+
+            if (!result.containsKey(day)) {
+                result.put(day, new AbstractMap.SimpleEntry<>(0.0, 0));
+            }
+            result.put(day,
+                    new AbstractMap.SimpleEntry<>(
+                            result.get(day).getKey() + amount,
+                            result.get(day).getValue() + count
+                    )
+            );
+        }
 
         return result;
     }
 }
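The reworked getResult() accumulates two numbers per weekday: the summed purchase totals (attribute 10) and the summed article counts (attributes 11 up to, but not including, endArticles = 25). The same accumulation can be expressed with Map.merge; the sketch below uses made-up rows purely to illustrate the idiom, it is not the committed code.

import java.util.AbstractMap;
import java.util.HashMap;
import java.util.Map;

// illustrative only: per-day accumulation of (turnover, article count) via Map.merge
public class WeekTotalsSketch {
    public static void main(String[] args) {
        Map<String, Map.Entry<Double, Integer>> result = new HashMap<>();

        // (day, purchase total, article count) for a few fictional rows
        Object[][] rows = {
                {"Montag", 12.5, 3},
                {"Montag", 7.0, 2},
                {"Dienstag", 20.0, 5},
        };

        for (Object[] row : rows) {
            String day = (String) row[0];
            double amount = (double) row[1];
            int count = (int) row[2];

            // merge sums both components if the day is already present
            result.merge(day,
                    new AbstractMap.SimpleEntry<>(amount, count),
                    (a, b) -> new AbstractMap.SimpleEntry<>(
                            a.getKey() + b.getKey(),
                            a.getValue() + b.getValue()));
        }

        result.forEach((day, e) ->
                System.out.println(day + " = " + e.getKey() + "€ " + e.getValue()));
    }
}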
@@ -1,4 +1,4 @@
-package de.hsel.spm.baudas;
+package de.hsel.spm.baudas.Analysis;
 
 import de.hsel.spm.baudas.analysis.WeekOverview;
 import org.junit.jupiter.api.Test;
@@ -17,13 +17,13 @@ import java.util.Map;
 public class WeekOverviewTest {
 
     @Test
-    public void test(){
+    public void test() {
         WeekOverview overview = new WeekOverview(new File(getClass().getClassLoader().getResource("kd100.csv").getFile()));
 
-        Map<String, Integer> result = overview.getResult();
+        Map<String, Map.Entry<Double, Integer>> result = overview.getResult();
 
-        for(Map.Entry<String, Integer> i : result.entrySet()){
-            System.out.println(i.getKey() + " = " + i.getValue());
+        for (Map.Entry<String, Map.Entry<Double, Integer>> i : result.entrySet()) {
+            System.out.println(i.getKey() + " = " + i.getValue().getKey() + "€ " + i.getValue().getValue());
         }
 
     }
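The test only prints the totals. If assertions are wanted, the org.junit.jupiter import suggests JUnit 5 is available; something along these lines could be appended inside test(), assuming kd100.csv contains at least one purchase per listed weekday.

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

        // sketch of possible assertions at the end of test()
        assertFalse(result.isEmpty(), "expected at least one weekday in the result");
        for (Map.Entry<String, Map.Entry<Double, Integer>> i : result.entrySet()) {
            assertTrue(i.getValue().getKey() >= 0.0, "turnover should not be negative");
            assertTrue(i.getValue().getValue() >= 0, "article count should not be negative");
        }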