/*
 * Copyright 2007 Kasper B. Graversen
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *     http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.supercsv.benchmark;

import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.supercsv.benchmark.cellprocessor.FormatIconColour;
import org.supercsv.benchmark.cellprocessor.FormatStopTypeAndName;
import org.supercsv.benchmark.model.TransportLocation;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.constraint.NotNull;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.io.CsvBeanReader;
import org.supercsv.io.ICsvBeanReader;
import org.supercsv.prefs.CsvPreference;
import org.supercsv.util.Util;

import com.carrotsearch.junitbenchmarks.annotation.BenchmarkMethodChart;

/**
 * Benchmarks each of the Super CSV writers. The CSV file used as the data for
 * writing is the first 50,000 rows of a file available for free download from
 * the Guardian newspaper website
 * (http://www.guardian.co.uk/news/datablog/2010/sep/27/uk-transport-national-public-data-repository#data).
 * The file (and consequently the data being written) contains no embedded
 * newlines.
 * 
 * @author James Bassett
 */
@BenchmarkMethodChart(filePrefix = "StandardCsvWritingBenchmark")
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class StandardCsvWritingBenchmarkTest extends AbstractCsvWritingBenchmark {

	// CSV file with 50,001 lines (including header)
	private static final String CSV_FILE = "Britain's transport infrastructure.csv";

	private static final String OUTPUT_DIR = "target" + File.separator;

	private static final CsvPreference PREFS = CsvPreference.STANDARD_PREFERENCE;

	private static String[] HEADER;

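	// Cell processors for the "using processors" write benchmarks - one per column
	// of the transport CSV. NotNull and Optional are standard Super CSV processors;
	// FormatStopTypeAndName and FormatIconColour are custom processors from this
	// benchmark's cellprocessor package.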
	private static final CellProcessor[] PROCESSORS = { new NotNull(), // atcoCode
			new NotNull(), // easting
			new NotNull(), // northing
			new NotNull(), // longitude
			new NotNull(), // latitude
			new NotNull(), // commonName
			new Optional(), // identifier
			new NotNull(), // direction
			new NotNull(), // street
			new Optional(), // landmark
			new NotNull(), // natGazId
			new Optional(), // natGazLocality
			new NotNull(), // stopType
			new NotNull(new FormatStopTypeAndName()), // stopTypeAndName
			new NotNull(new FormatIconColour()), // iconColour
	};

	// the number of data rows to read (max possible is 50,000)
	private static final int ROWS = 50000;

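	// the source data held in the three forms the writers accept: beans, Lists and Maps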
	private static List<Object> BEAN_DATA;
	private static List<List<Object>> LIST_DATA;
	private static List<Map<String, Object>> MAP_DATA;

	/**
	 * Set up the data for writing by reading the required number of records
	 * from the reading benchmark CSV file.
	 */
	@BeforeClass
	public static void setUpData() throws Exception {
		BEAN_DATA = new ArrayList<Object>();
		LIST_DATA = new ArrayList<List<Object>>();
		MAP_DATA = new ArrayList<Map<String, Object>>();

		ICsvBeanReader beanReader = null;
		try {
			beanReader = new CsvBeanReader(new FileReader(CSV_FILE),
					CsvPreference.STANDARD_PREFERENCE);
			HEADER = beanReader.getHeader(true);

			for (int j = 0; j < ROWS; j++) {
				TransportLocation location = beanReader.read(
						TransportLocation.class, HEADER,
						StandardCsvReadingBenchmarkTest.PROCESSORS);

				// bean data
				BEAN_DATA.add(location);

				// list data
				List<Object> list = Arrays.asList(location.toObjectArray());
				LIST_DATA.add(list);

				// map data
				Map<String, Object> map = new HashMap<String, Object>();
				Util.filterListToMap(map, HEADER, list);
				MAP_DATA.add(map);
			}

		} finally {
			// guard against an NPE if the reader was never created
			if (beanReader != null) {
				beanReader.close();
			}
		}

	}

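	// The time* helpers called below are inherited from AbstractCsvWritingBenchmark
	// (not shown here); each is assumed to create the corresponding writer, write the
	// header and ROWS rows of data, and close the writer, with junit-benchmarks
	// recording the elapsed time.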
	/**
	 * Times CsvListWriter.
	 */
	@Test
	public void testCsvListWriter() throws Exception {
		timeCsvListWriter(new FileWriter(OUTPUT_DIR + "CsvListWriter" + ROWS
				+ ".csv"), PREFS, HEADER, LIST_DATA, ROWS);
	}

	/**
	 * Times CsvListWriter using processors.
	 */
	@Test
	public void testCsvListWriterUsingProcessors() throws Exception {
		timeCsvListWriterUsingProcessors(new FileWriter(OUTPUT_DIR
				+ "CsvListWriterUsingProcessors" + ROWS + ".csv"), PREFS,
				HEADER, LIST_DATA, PROCESSORS, ROWS);
	}

	/**
	 * Times CsvMapWriter.
	 */
	@Test
	public void testCsvMapWriter() throws Exception {
		timeCsvMapWriter(new FileWriter(OUTPUT_DIR + "CsvMapWriter" + ROWS
				+ ".csv"), PREFS, HEADER, MAP_DATA, ROWS);
	}

	/**
	 * Times CsvMapWriter using processors.
	 */
	@Test
	public void testCsvMapWriterUsingProcessors() throws Exception {
		timeCsvMapWriterUsingProcessors(new FileWriter(OUTPUT_DIR
				+ "CsvMapWriterUsingProcessors" + ROWS + ".csv"), PREFS,
				HEADER, MAP_DATA, PROCESSORS, ROWS);
	}

	/**
	 * Times CsvBeanWriter.
	 */
	@Test
	public void testCsvBeanWriter() throws Exception {
		timeCsvBeanWriter(new FileWriter(OUTPUT_DIR + "CsvBeanWriter" + ROWS
				+ ".csv"), PREFS, HEADER, BEAN_DATA, ROWS);
	}

	/**
	 * Times CsvBeanWriter using processors.
	 */
	@Test
	public void testCsvBeanWriterUsingProcessors() throws Exception {
		timeCsvBeanWriterUsingProcessors(new FileWriter(OUTPUT_DIR
				+ "CsvBeanWriterUsingProcessors" + ROWS + ".csv"), PREFS,
				HEADER, BEAN_DATA, PROCESSORS, ROWS);
	}

	/**
	 * Times CsvDozerBeanWriter.
	 */
	@Test
	public void testCsvDozerBeanWriter() throws Exception {
		timeCsvDozerBeanWriter(new FileWriter(OUTPUT_DIR + "CsvDozerBeanWriter"
				+ ROWS + ".csv"), PREFS, HEADER, TransportLocation.class,
				BEAN_DATA, ROWS);
	}

	/**
	 * Times CsvDozerBeanWriter using processors.
	 */
	@Test
	public void testCsvDozerBeanWriterUsingProcessors() throws Exception {
		timeCsvDozerBeanWriterUsingProcessors(new FileWriter(OUTPUT_DIR
				+ "CsvDozerBeanWriterUsingProcessors" + ROWS + ".csv"), PREFS,
				HEADER, TransportLocation.class, BEAN_DATA, PROCESSORS, ROWS);
	}

}
209 }