package io.konik.csv.mapper;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import javax.annotation.Nullable;

import org.dozer.DozerBeanMapper;
import org.dozer.loader.api.BeanMappingBuilder;
import org.dozer.loader.api.FieldDefinition;
import org.dozer.loader.api.FieldsMappingOption;
import org.dozer.loader.api.FieldsMappingOptions;
import org.dozer.loader.api.TypeMappingBuilder;
import org.dozer.loader.api.TypeMappingOptions;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.dozer.CsvDozerBeanData;
import org.supercsv.io.dozer.CsvDozerBeanReader;
import org.supercsv.prefs.CsvPreference;

import com.google.common.base.Function;
import com.google.common.collect.Lists;

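/**
 * Builds the SuperCSV / Dozer mapping for CSV import and export: cell processors, column names and a
 * {@link DozerBeanMapper} derived from a list of {@link Column} definitions. The delimiter of an
 * existing CSV file can be recognized automatically.
 *
 * <p>A minimal usage sketch (the file name, the bean class {@code MyRowBean} and the
 * {@code columnsConfigurer} instance below are placeholders, not part of this class):</p>
 *
 * <pre>{@code
 * File csvFile = new File("rows.csv");
 * CsvMapperBuilder builder = CsvMapperBuilder.withHeadersFromCsvFile(csvFile, columnsConfigurer);
 * CsvDozerBeanReader reader = builder.getBeanReader(csvFile, MyRowBean.class);
 * }</pre>
 */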
public class CsvMapperBuilder {

   private final CopyOnWriteArrayList<Column> columns = new CopyOnWriteArrayList<Column>();

   private final CsvPreference csvPreference;

   public CsvMapperBuilder(CsvPreference csvPreference) {
      this.csvPreference = csvPreference;
   }

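   /**
    * Builds the given column definition and appends it to the mapping.
    */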
   public CsvMapperBuilder add(Column.Builder builder) {
      columns.add(builder.build());
      return this;
   }

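   /**
    * Appends all given columns to the mapping, preserving their order.
    */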
   public CsvMapperBuilder addColumns(List<Column> columns) {
      this.columns.addAll(columns);
      return this;
   }

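   /**
    * Returns the cell processors of all configured columns, in column order.
    */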
   public CellProcessor[] getCellProcessors() {
      return Lists.transform(columns, new Function<Column, CellProcessor>() {
         public CellProcessor apply(Column column) {
            return column.processor;
         }
      }).toArray(new CellProcessor[columns.size()]);
   }

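   /**
    * Returns the names of all configured columns, in column order.
    */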
   public String[] getColumnNames() {
      return Lists.transform(columns, new Function<Column, String>() {
         public String apply(Column column) {
            return column.name;
         }
      }).toArray(new String[columns.size()]);
   }

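   /**
    * Creates a {@link DozerBeanMapper} with two one-way mappings: from a CSV row to the destination
    * bean (used when reading) and from the destination bean back to a CSV row (used when writing).
    */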
   public DozerBeanMapper buildBeanMapper(final Class<?> destinationObjectClass) {
      DozerBeanMapper beanMapper = new DozerBeanMapper();
      beanMapper.addMapping(new BeanMappingBuilder() {
         @Override
         protected void configure() {
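            // Reading direction: CSV cells (CsvDozerBeanData.columns) -> destination bean.
            // mapNull(false) keeps empty cells from overwriting existing bean values.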
            TypeMappingBuilder readerBuilder = mapping(CsvDozerBeanData.class, destinationObjectClass,
                  TypeMappingOptions.oneWay(), TypeMappingOptions.wildcard(false), TypeMappingOptions.mapNull(false));

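            // Writing direction: destination bean -> CSV cells; mapNull(true) writes null bean
            // properties as empty cells instead of skipping them.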
            TypeMappingBuilder writerBuilder = mapping(destinationObjectClass,
                  type(CsvDozerBeanData.class).mapNull(true), TypeMappingOptions.oneWay(),
                  TypeMappingOptions.wildcard(false));

            for (int i = 0; i < columns.size(); i++) {
               Column column = columns.get(i);

               if (column == null) {
                  throw new NullPointerException(String.format("Column at index %d must not be null", i));
               }

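               // The i-th cell of the underlying CSV row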
               String srcField = "columns[" + i + "]";

               if (column.fieldDefinition != null) {
                  readerBuilder.fields(srcField, column.fieldDefinition, column.mappingOptions);
               } else {
                  readerBuilder.fields(srcField, column.name, column.mappingOptions);
               }

               writerBuilder.fields(column.name, srcField, FieldsMappingOptions.copyByReference());
            }
         }
      });

      return beanMapper;
   }

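   /**
    * Starts a fluent column definition for the given header name.
    */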
   public static Column.Builder column(String header) {
      return Column.builder().name(header);
   }

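   /**
    * Creates a builder whose columns are derived from the header row of the given CSV file. The
    * delimiter is recognized automatically and every header value is resolved to a column
    * definition by the given {@link ColumnsConfigurer}.
    */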
   public static CsvMapperBuilder withHeadersFromCsvFile(final File csvFile,
         final ColumnsConfigurer columnsConfigurer) {
      if (!csvFile.exists()) {
         throw new IllegalArgumentException("File does not exist: " + csvFile.getAbsolutePath());
      }

      CsvPreference csvPreference = recognizeCsvPreference(csvFile);

      try {
         final CsvDozerBeanReader reader = new CsvDozerBeanReader(
               new InputStreamReader(new FileInputStream(csvFile), "UTF-8"), csvPreference);
         final String[] headers = reader.getHeader(true);
         reader.close();

         List<Column> columns = Lists.transform(Arrays.asList(headers), new Function<String, Column>() {
            @Nullable
            @Override
            public Column apply(String input) {
               return columnsConfigurer.getColumnDefinitionForHeader(input);
            }
         });

         return new CsvMapperBuilder(csvPreference).addColumns(columns);

      } catch (Exception e) {
         throw new RuntimeException("CsvMapperBuilder initialization failed", e);
      }
   }

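   /**
    * Recognizes the CSV delimiter by comparing the field counts of the header and the first data
    * row; comma-separated files map to the standard preference, semicolon-separated files to the
    * Excel north Europe preference.
    */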
   public static CsvPreference recognizeCsvPreference(File file) {
      String[] lines = new String[2];
      try {
         BufferedReader bufferedReader = new BufferedReader(new FileReader(file));
         try {
            int lineNum = 0;
            String line;

            while (lineNum < 2 && (line = bufferedReader.readLine()) != null) {
               // Mask quoted cell content so that delimiters inside quotes are not counted
               lines[lineNum++] = line.replaceAll("\"([^\"]+)\"", "_");
            }
         } finally {
            bufferedReader.close();
         }

         if (isEmptyLine(lines[0]) || isEmptyLine(lines[1])) {
            throw new IllegalArgumentException("CSV file has to contain a header and at least one row");
         }

      } catch (IOException e) {
         throw new IllegalStateException("Delimiter recognition failed", e);
      }

      if (isDelimiter(",", lines[0], lines[1])) {
         return CsvPreference.STANDARD_PREFERENCE;
      }

      if (isDelimiter(";", lines[0], lines[1])) {
         return CsvPreference.EXCEL_NORTH_EUROPE_PREFERENCE;
      }

      throw new IllegalStateException("Delimiter for the CSV file could not be found");
   }

   private static boolean isEmptyLine(String line) {
      return line == null || line.isEmpty();
   }

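   /**
    * A candidate is accepted as delimiter if the header and the first data row split into the same
    * number of fields and the header actually contains the candidate.
    */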
   private static boolean isDelimiter(String delimiter, String lineOne, String lineTwo) {
      return lineOne.split(delimiter).length == lineTwo.split(delimiter).length && lineOne.contains(delimiter);
   }

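   /**
    * Creates a {@link CsvDozerBeanReader} for the given file and bean type. The header row is read
    * and skipped so that subsequent reads return data rows only.
    */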
   public CsvDozerBeanReader getBeanReader(File csvFile, Class<?> beanType) {
      try {
         CsvDozerBeanReader reader = new CsvDozerBeanReader(
               new InputStreamReader(new FileInputStream(csvFile), "UTF-8"), csvPreference, buildBeanMapper(beanType));
         reader.getHeader(true);
         return reader;

      } catch (IOException e) {
         throw new RuntimeException("Bean reader initialization failed", e);
      }
   }

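   /**
    * A single CSV column: its name, target type, SuperCSV cell processor and Dozer field mapping
    * options.
    */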
   static class Column {
      final String name;
      final Class<?> type;
      final CellProcessor processor;
      final FieldsMappingOption[] mappingOptions;
      final FieldDefinition fieldDefinition;

      public Column(Builder builder) {
         this.name = builder.name;
         this.type = builder.type;
         this.processor = builder.processor;
         this.mappingOptions = builder.mappingOptions.toArray(new FieldsMappingOption[builder.mappingOptions.size()]);
         this.fieldDefinition = builder.fieldDefinition;
      }

      public static Builder builder() {
         return new Builder();
      }

      @Override
      public String toString() {
         return "Column{" + "name='" + name + '\'' + ", type=" + type + ", processor=" + processor + ", mappingOptions="
               + Arrays.toString(mappingOptions) + ", fieldDefinition=" + fieldDefinition + '}';
      }

      public static class Builder {
         private String name;
         private Class<?> type = String.class;
         private CellProcessor processor = new Optional();
         private List<FieldsMappingOption> mappingOptions = new LinkedList<FieldsMappingOption>();
         private FieldDefinition fieldDefinition;

         public Builder name(String name) {
            this.name = name;
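            // Map nested paths such as "a.b.c" via the setter of the leaf property (setC).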
            this.fieldDefinition = new FieldDefinition(name)
                  .setMethod(String.format("set%s", capitalize(extractChildsField(name))));
            return this;
         }

         public Builder type(Class<?> type) {
            this.type = type;
            return this;
         }

         public Builder processor(CellProcessor processor) {
            this.processor = processor;
            return this;
         }

         public Builder mappingOptions(FieldsMappingOption... mappingOptions) {
            this.mappingOptions.addAll(Arrays.asList(mappingOptions));
            return this;
         }

         public Builder fieldDefinition(FieldDefinition fieldDefinition) {
            this.fieldDefinition = fieldDefinition;
            return this;
         }

         public Column build() {
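            // Default to a destination type hint so Dozer converts the raw String cell to the column type.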
            if (mappingOptions.isEmpty()) {
               mappingOptions.add(FieldsMappingOptions.hintB(type));
            }
            return new Column(this);
         }

         private static String capitalize(String str) {
            if (str != null && str.length() > 0) {
               return str.substring(0, 1).toUpperCase() + str.substring(1);
            }
            return str;
         }

         private static String extractChildsField(String path) {
            if (path.lastIndexOf(".") >= 0) {
               return path.substring(path.lastIndexOf(".") + 1);
            }
            return path;
         }
      }
   }
}