package be.nikiroo.utils.serial;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
+import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
+import be.nikiroo.utils.IOUtils;
import be.nikiroo.utils.StringUtils;
+import be.nikiroo.utils.streams.NextableInputStream;
+import be.nikiroo.utils.streams.NextableInputStreamStep;
/**
* A simple class that can accept the output of {@link Exporter} to recreate
* @author niki
*/
public class Importer {
- static private Integer SIZE_ID = null;
- static private byte[] NEWLINE = null;
-
private Boolean link;
private Object me;
private Importer child;
private String currentFieldName;
- static {
- try {
- SIZE_ID = "EXT:".getBytes("UTF-8").length;
- NEWLINE = "\n".getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- // UTF-8 is mandated to exist on confirming jre's
- }
- }
-
/**
* Create a new {@link Importer}.
*/
* content, or a number of lines of it (any given line <b>MUST</b> be
* complete though) and accumulate it with the already present data.
*
- * @param data
+ * @param in
* the data to parse
*
* @return itself so it can be chained
* if a class described in the serialised data cannot be found
* @throws IOException
* if the content cannot be read (for instance, corrupt data)
+ * @throws NullPointerException
+ *             if the stream is NULL or empty
*/
- public Importer read(String data) throws NoSuchFieldException,
- NoSuchMethodException, ClassNotFoundException, IOException {
- return read(data.getBytes("UTF-8"), 0);
- }
+ public Importer read(InputStream in) throws NoSuchFieldException,
+ NoSuchMethodException, ClassNotFoundException, IOException,
+ NullPointerException {
- /**
- * Read some data into this {@link Importer}: it can be the full serialised
- * content, or a number of lines of it (any given line <b>MUST</b> be
- * complete though) and accumulate it with the already present data.
- *
- * @param data
- * the data to parse
- * @param offset
- * the offset at which to start reading the data (we ignore
- * anything that goes before that offset)
- *
- * @return itself so it can be chained
- *
- * @throws NoSuchFieldException
- * if the serialised data contains information about a field
- * which does actually not exist in the class we know of
- * @throws NoSuchMethodException
- * if a class described in the serialised data cannot be created
- * because it is not compatible with this code
- * @throws ClassNotFoundException
- * if a class described in the serialised data cannot be found
- * @throws IOException
- * if the content cannot be read (for instance, corrupt data)
- */
- private Importer read(byte[] data, int offset) throws NoSuchFieldException,
- NoSuchMethodException, ClassNotFoundException, IOException {
+ NextableInputStream stream = new NextableInputStream(in,
+ new NextableInputStreamStep('\n'));
- int dataStart = offset;
- while (dataStart < data.length) {
- String id = "";
- if (data.length - dataStart >= SIZE_ID) {
- id = new String(data, dataStart, SIZE_ID);
+ try {
+ if (in == null) {
+ throw new NullPointerException("InputStream is null");
}
-
- boolean zip = id.equals("ZIP:");
- boolean b64 = id.equals("B64:");
- if (zip || b64) {
- dataStart += SIZE_ID;
+ if (stream.eof()) {
+ throw new NullPointerException("InputStream is empty");
}
- int count = find(data, dataStart, NEWLINE);
- count -= dataStart;
- if (count < 0) {
- count = data.length - dataStart;
- }
+ while (stream.next()) {
+ boolean zip = stream.startsWiths("ZIP:");
+ boolean b64 = stream.startsWiths("B64:");
- if (zip || b64) {
- boolean unpacked = false;
- try {
- byte[] line = StringUtils.unbase64(data, dataStart, count,
+ if (zip || b64) {
+ stream.skip("XXX:".length());
+ InputStream decoded = StringUtils.unbase64(stream.open(),
zip);
- unpacked = true;
- read(line, 0);
- } catch (IOException e) {
- throw new IOException("Internal error when decoding "
- + (unpacked ? "unpacked " : "")
- + (zip ? "ZIP" : "B64")
- + " content: input may be corrupt");
+ try {
+ read(decoded);
+ } finally {
+ decoded.close();
+ }
+ } else {
+ processLine(stream);
}
- } else {
- String line = new String(data, dataStart, count, "UTF-8");
- processLine(line);
}
-
- dataStart += count + NEWLINE.length;
+ } finally {
+ stream.close(false);
}
return this;
* Read a single (whole) line of serialised data into this {@link Importer}
* and accumulate it with the already present data.
*
- * @param line
+ * @param in
* the line to parse
*
* @return TRUE if we are just done with one object or sub-object
* @throws IOException
* if the content cannot be read (for instance, corrupt data)
*/
- private boolean processLine(String line) throws NoSuchFieldException,
+ private boolean processLine(InputStream in) throws NoSuchFieldException,
NoSuchMethodException, ClassNotFoundException, IOException {
+
// Defer to latest child if any
if (child != null) {
- if (child.processLine(line)) {
+ if (child.processLine(in)) {
if (currentFieldName != null) {
setField(currentFieldName, child.getValue());
currentFieldName = null;
return false;
}
+ // TODO: process the stream directly instead of buffering the whole
+ // line into a String via IOUtils.readSmallStream
+ String line = IOUtils.readSmallStream(in);
+
if (line.equals("{")) { // START: new child if needed
if (link != null) {
child = new Importer(map);
}
}
- /**
- * Find the given needle in the data and return its position (or -1 if not
- * found).
- *
- * @param data
- * the data to look through
- * @param offset
- * the offset at wich to start searching
- * @param needle
- * the needle to find
- *
- * @return the position of the needle if found, -1 if not found
- */
- private int find(byte[] data, int offset, byte[] needle) {
- for (int i = offset; i + needle.length - 1 < data.length; i++) {
- boolean same = true;
- for (int j = 0; j < needle.length; j++) {
- if (data[i + j] != needle[j]) {
- same = false;
- break;
- }
- }
-
- if (same) {
- return i;
- }
- }
-
- return -1;
- }
-
/**
* Return the current deserialised value.
*