1 module mutils.serializer.lua;
2 
3 import std.experimental.allocator;
4 import std.experimental.allocator.mallocator;
5 import std.meta;
6 
7 import mutils.container.vector;
8 public import mutils.serializer.common;
9 import mutils.serializer.lexer_utils;
10 import mutils.serializer.lua_json_token;
11 
12 //  COS==ContainerOrSlice
13 
14 /**
15  * Serializer to save data in lua format
16  * If serialized data have to be allocated it is not saved/loaded unless it has "malloc" UDA (@("malloc"))
17  */
18 class LuaSerializer {
19 	/**
20 	 * Function loads and saves data depending on compile time variable load
21 	 * If useMalloc is true pointers, arrays, classes will be saved and loaded using Mallocator
22 	 * T is the serialized variable
23 	 * COS is char[] when load==Load.yes 
24 	 * COS container supplied by user in which data is stored when load==Load.no(save) 
25 	 */
26 	void serialize(Load load, bool useMalloc = false, T, COS)(ref T var, ref COS con) {
27 		try {
28 			static if (load == Load.yes) {
29 				LuaLexer lex = LuaLexer(cast(string) con, true, true);
30 				auto tokens = lex.tokenizeAll();
31 				//load
32 				__gshared static LuaSerializerToken serializer = new LuaSerializerToken();
33 				serializer.serialize!(Load.yes, useMalloc)(var, tokens[]);
34 				tokens.clear();
35 			} else {
36 				__gshared static LuaSerializerToken serializer = new LuaSerializerToken();
37 				TokenDataVector tokens;
38 				serializer.serialize!(Load.no, useMalloc)(var, tokens);
39 				tokensToCharVectorPreatyPrint!(LuaLexer)(tokens[], con);
40 				tokens.clear();
41 			}
42 		}
43 		catch (Exception e) {
44 		}
45 	}
46 
47 	//support for rvalues during load
48 	void serialize(Load load, bool useMalloc = false, T, COS)(ref T var, COS con) {
49 		static assert(load == Load.yes);
50 		serialize!(load, useMalloc)(var, con);
51 	}
52 
53 	__gshared static LuaSerializer instance = new LuaSerializer();
54 
55 }
56 
57 //-----------------------------------------
58 //--- Tests
59 //-----------------------------------------
60 
61 // test formating
62 // test customVector of char serialization
unittest {
	static struct TestStruct {
		struct Project {
			Vector!char name;
			Vector!char path;
			Vector!char texPath;
			int ccc;
		}

		Vector!Project projects;
	}

	string firstPath = "aaa aaa";
	string secondName = "ddd";
	string secondPath = "ccc";

	TestStruct data;

	TestStruct.Project projA;
	projA.path ~= cast(char[]) firstPath;
	projA.ccc = 100;

	TestStruct.Project projB;
	projB.name ~= cast(char[]) secondName;
	projB.path ~= cast(char[]) secondPath;
	projB.ccc = 200;

	data.projects ~= projA;
	data.projects ~= projB;

	Vector!char buffer;

	// serialize to lua text
	LuaSerializer.instance.serialize!(Load.no)(data, buffer);

	// wipe the variable, then round-trip it back from the buffer
	data = TestStruct.init;
	LuaSerializer.instance.serialize!(Load.yes)(data, buffer[]);

	assert(data.projects.length == 2);
	assert(data.projects[0].name[] == "");
	assert(data.projects[0].path[] == "aaa aaa");
	assert(data.projects[0].ccc == 100);

	assert(data.projects[1].name[] == "ddd");
	assert(data.projects[1].path[] == "ccc");
	assert(data.projects[1].ccc == 200);
}
107 
// test loading from messy lua text: out-of-order fields, arbitrary whitespace,
// and a "malloc"-annotated string member
unittest {

	static struct TestStruct {
		int a;
		int b;
		@("malloc") string c;
	}

	TestStruct test;
	// Fix: removed unused local `Vector!char container` (this test only loads).
	string str = `
	
{
    b   =145    ,  a=  1,   c               =  


"asdasdas asdasdas asdasd asd"
}
`;

	//load
	LuaSerializer.instance.serialize!(Load.yes)(test, cast(char[]) str);
	//writeln(test);
	assert(test.a == 1);
	assert(test.b == 145);
	assert(test.c == "asdasdas asdasdas asdasd asd");
}
135 
136 //-----------------------------------------
137 //--- Lexer 
138 //-----------------------------------------
139 
/// Hand-written lexer for the lua-like serialization format.
/// Produces TokenData values; also knows how to print a token back out (toChars).
struct LuaLexer {
	// Token kinds; values alias the shared StandardTokens ids so TokenData
	// is interchangeable with the other serializer lexers.
	enum Token {
		notoken = StandardTokens.notoken,
		white = StandardTokens.white,
		character = StandardTokens.character,
		identifier = StandardTokens.identifier,
		string_ = StandardTokens.string_,
		double_ = StandardTokens.double_,
		long_ = StandardTokens.long_,
		comment_multiline = StandardTokens.comment_multiline,
		comment_line = StandardTokens.comment_line,
	}

	// Single-character punctuation recognized as Token.character.
	alias characterTokens = AliasSeq!('[', ']', '{', '}', '(', ')', ',', '=');

	string code; // full input, kept so slice can be rebuilt/reset
	string slice; // unconsumed tail of code
	bool skipUnnecessaryWhiteTokens = true;
	bool skipComments = true;

	// Position of the token most recently returned by getNextToken().
	uint line;
	uint column;

	@disable this();

	/// code: input to tokenize; skipWhite/skipComments: drop those kinds in getNextToken().
	this(string code, bool skipWhite, bool skipComments) {
		this.code = code;
		slice = this.code[];
		skipUnnecessaryWhiteTokens = skipWhite;
		this.skipComments = skipComments;
	}

	/// Resets the lexer to an empty state.
	void clear() {
		code = null;
		line = column = 0;
		slice = null;
	}

	/// Peeks at the next token without consuming input (slice is restored).
	TokenData checkNextToken() {
		auto sliceCopy = slice;
		auto token = getNextToken();
		slice = sliceCopy;
		return token;
	}

	// Reads exactly one raw token from the front of slice.
	// Precondition: slice.length > 0 (guaranteed by the caller, getNextToken()).
	private TokenData getNextTokenImpl() {
		TokenData token;
		switch (slice[0]) {
			//------- character tokens ------------------------
			// compile-time foreach expands to one case label per punctuation char;
			// all labels share the statements that follow the loop
			foreach (ch; characterTokens) {
		case ch:
			}
			token = slice[0];
			slice = slice[1 .. $];
			return token;

			//--------- white tokens --------------------------
			foreach (ch; whiteTokens) {
		case ch:
			}
			serializeWhiteTokens!(true)(token, slice);
			return token;

			//------- escaped strings -------------------------
		case '"':
			serializeStringToken!(true)(token, slice);
			return token;

			//------- comment -------------------------
		case '/':
			// needs a second char to decide between /* ... */ and // ...
			check(slice.length > 1);
			if (slice[1] == '*') {
				serializeCommentMultiline!(true)(token, slice);
			} else if (slice[1] == '/') {
				serializeCommentLine!(true)(token, slice);
			} else {
				check(false);
			}
			return token;

			//------- something else -------------------------
		default:
			break;
		}
		if (isIdentifierFirstChar(slice[0])) {
			serializeIdentifier!(true)(token, slice);
		} else if ((slice[0] >= '0' && slice[0] <= '9') || slice[0] == '-') {
			serializeNumberToken!(true)(token, slice);
		} else {
			// unrecognized character: abort lexing by draining the input;
			// the caller's loop ends and a notoken is returned
			slice = null;
		}
		return token;
	}

	/// Returns the next token, skipping whitespace/comments when configured to;
	/// updates line/column bookkeeping for the consumed span.
	TokenData getNextToken() {
		TokenData token;
		string sliceCopy = slice[];
		scope (exit) {
			token.line = line;
			token.column = column;
			updateLineAndCol(line, column, sliceCopy, slice);
		}
		while (slice.length > 0) {
			token = getNextTokenImpl();
			if ((skipComments && token.isComment)
					|| (skipUnnecessaryWhiteTokens && token.type == Token.white)) {
				token = TokenData.init;
				continue;
			}
			break;
		}
		return token;
	}

	/// Appends the textual form of token to vec (inverse of tokenization).
	/// final switch: compiler enforces that every Token member is handled.
	static void toChars(Vec)(TokenData token, ref Vec vec) {

		final switch (cast(Token) token.type) {
		case Token.long_:
		case Token.double_:
			serializeNumberToken!(false)(token, vec);
			break;
		case Token.comment_multiline:
			serializeCommentMultiline!(false)(token, vec);
			break;
		case Token.comment_line:
			serializeCommentLine!(false)(token, vec);
			break;
		case Token.character:
			vec ~= token.ch;
			break;
		case Token.white:
		case Token.identifier:
			vec ~= cast(char[]) token.str;
			break;
		case Token.string_:
			// re-wrap in quotes, emitting the escaped representation
			vec ~= '"';
			vec ~= cast(char[]) token.getEscapedString();
			vec ~= '"';
			break;

		case Token.notoken:
			assert(0);
		}

	}

}
287 
// smoke test: lexer tokenizes mixed punctuation, strings, comments and numbers
unittest {
	string source = `{ [ ala= "asdasd",
// hkasdf sdfasdfs sdf  &8 9 (( 7 ^ 	
 ccc=123.3f  /* somethingsdfsd 75#^ */  ]}"`;
	LuaLexer lexer = LuaLexer(source, true, false);
	auto tokenList = lexer.tokenizeAll();
	//writelnTokens(tokenList[]);
}
296 
297 alias LuaSerializerToken = JSON_Lua_SerializerToken!(false);
298 
299 //-----------------------------------------
300 //--- Tests
301 //-----------------------------------------
302 
303 // test formating
304 // test customVector of char serialization
unittest {
	static struct TestStruct {
		struct Project {
			Vector!char path;
			int ccc;
		}

		Vector!Project projects;
	}

	string pathA = "aaa aaa";
	string pathB = "ddd";

	TestStruct source;
	TestStruct loaded;

	TestStruct.Project first;
	first.path ~= cast(char[]) pathA;
	first.ccc = 100;

	TestStruct.Project second;
	second.path ~= cast(char[]) pathB;
	second.ccc = 200;

	source.projects ~= first;
	source.projects ~= second;

	Vector!TokenData tokens;
	//save — tokens stay valid only while `source` is unmodified
	LuaSerializerToken.instance.serialize!(Load.no)(source, tokens);

	//Vector!char vv;
	//tokensToCharVectorPreatyPrint!(LuaLexer)(tokens[], vv);

	//load into a fresh variable and compare
	LuaSerializerToken.instance.serialize!(Load.yes)(loaded, tokens[]);
	assert(loaded.projects.length == 2);
	assert(loaded.projects[0].ccc == 100);
	assert(loaded.projects[0].path[] == cast(char[]) "aaa aaa");
	assert(loaded.projects[1].ccc == 200);
	assert(loaded.projects[1].path[] == cast(char[]) "ddd");
}