module mutils.serializer.lua;

import std.meta;

public import mutils.serializer.common;

//import mutils.serializer.lua_token : LuaSerializerToken, LuaLexer;
import mutils.serializer.lexer_utils;

/**
 * Serializer that saves and loads data in Lua table format.
 * Data that has to be allocated (pointers, arrays, classes) is not saved/loaded unless the field carries the "malloc" UDA (@("malloc")).
 */
class LuaSerializer{
	/**
	 * Loads or saves data depending on the compile-time parameter load.
	 * If useMalloc is true, pointers, arrays and classes are saved and loaded using Mallocator.
	 * T is the serialized variable.
	 * ContainerOrSlice is char[] when load==Load.yes (load).
	 * ContainerOrSlice is a user-supplied container that receives the output when load==Load.no (save).
	 * A short usage sketch follows this class; the unit tests below cover more cases.
	 */
	void serialize(Load load,bool useMalloc=false, T, ContainerOrSlice)(ref T var,ref ContainerOrSlice con){
		try{
			static if(load==Load.yes){
				LuaLexer lex=LuaLexer(cast(string)con, true, true);
				auto tokens=lex.tokenizeAll();
				//load
				__gshared static LuaSerializerToken serializer= new LuaSerializerToken();
				serializer.serialize!(Load.yes, useMalloc)(var,tokens[]);
				tokens.clear();
			}else{
				//save
				__gshared static LuaSerializerToken serializer= new LuaSerializerToken();
				TokenDataVector tokens;
				serializer.serialize!(Load.no, useMalloc)(var,tokens);
				tokensToCharVectorPreatyPrint!(LuaLexer)(tokens[],con);
				tokens.clear();
			}
		}catch(Exception e){
			// Malformed input is ignored: var keeps whatever was deserialized so far.
		}
	}

	// support for rvalues during load
	void serialize(Load load,bool useMalloc=false, T, ContainerOrSlice)(ref T var,ContainerOrSlice con){
		static assert(load==Load.yes);
		serialize!(load,useMalloc)(var,con);
	}

	__gshared static LuaSerializer instance= new LuaSerializer();

}
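
/*
 Minimal usage sketch (illustration only, mirroring what the unit tests below
 verify): a save/load round trip through the shared LuaSerializer.instance
 with a plain struct. The struct Point and its fields are made up for this example.
*/
unittest{
	import mutils.container.vector;

	static struct Point{
		int x;
		int y;
	}

	Point p=Point(1, 2);
	Vector!char text;

	// save: Load.no renders p as Lua table text into the container
	LuaSerializer.instance.serialize!(Load.no)(p, text);

	// load: Load.yes parses the text back into a fresh variable
	Point loaded;
	LuaSerializer.instance.serialize!(Load.yes)(loaded, text[]);
	assert(loaded==p);
}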


//-----------------------------------------
//--- Tests
//-----------------------------------------
import mutils.container.vector;
// test formatting
// test serialization of a custom vector of char (Vector!char)
unittest{
	static struct TestStruct{
		struct Project
		{
			Vector!char name;
			Vector!char path;
			Vector!char texPath;
			int ccc;
		}

		Vector!Project projects;
	}
	TestStruct test;
	TestStruct.Project p1,p2;
	p1.name~=[];
	p1.path~=['a', 'a', 'a', ' ', 'a', 'a', 'a'];
	p1.texPath~=[];
	p1.ccc=100;

	p2.name~=['d', 'd', 'd'];
	p2.path~=['c', 'c', 'c'];
	p2.texPath~=[];
	p2.ccc=200;
	test.projects~=p1;
	test.projects~=p2;
	Vector!char container;

	//save
	__gshared static LuaSerializer serializer= new LuaSerializer();
	serializer.serialize!(Load.no)(test,container);
	//writeln(container[]);

	//reset var
	test=TestStruct.init;

	//load
	serializer.serialize!(Load.yes)(test,container[]);
	assert(test.projects.length==2);
	assert(test.projects[0].name[]=="");
	assert(test.projects[0].path[]=="aaa aaa");
	assert(test.projects[0].ccc==100);

	assert(test.projects[1].name[]=="ddd");
	assert(test.projects[1].path[]=="ccc");
	assert(test.projects[1].ccc==200);
}

unittest{

	static struct TestStruct{
		int a;
		int b;
		@("malloc") string c;
	}
	TestStruct test;
	Vector!char container;
	string str=`

{
    b   =145    ,  a=  1,   c               =  


"asdasdas asdasdas asdasd asd"
}
`;

	//load
	LuaSerializer.instance.serialize!(Load.yes)(test,cast(char[])str);
	//writeln(test);
	assert(test.a==1);
	assert(test.b==145);
	assert(test.c=="asdasdas asdasdas asdasd asd");
}


//-----------------------------------------
//--- Lexer
//-----------------------------------------

struct LuaLexer{
	enum Token{
		notoken=StandardTokens.notoken,
		white=StandardTokens.white,
		character=StandardTokens.character,
		identifier=StandardTokens.identifier,
		string_=StandardTokens.string_,
		double_=StandardTokens.double_,
		long_=StandardTokens.long_,
		comment_multiline=StandardTokens.comment_multiline,
		comment_line=StandardTokens.comment_line,
	}

	// single characters that are emitted as standalone tokens
	alias characterTokens=AliasSeq!('[',']','{','}','(',')',',','=');

	Vector!char code;
	string slice;
	bool skipUnnecessaryWhiteTokens=true;
	bool skipComments=true;

	uint line;
	uint column;

	@disable this();

	this(string code, bool skipWhite, bool skipComments){
		this.code~=cast(char[])code;
		slice=cast(string)this.code[];
		skipUnnecessaryWhiteTokens=skipWhite;
		this.skipComments=skipComments;
	}

	void clear(){
		code.clear();
		line=column=0;
		slice=null;
	}

	// Returns the next token without consuming it.
	TokenData checkNextToken(){
		auto sliceCopy=slice;
		auto token=getNextToken();
		slice=sliceCopy;
		return token;
	}

	private TokenData getNextTokenImpl(){
		TokenData token;
		switch(slice[0]){
			//------- character tokens (case labels generated at compile time) ------------------------
			foreach(ch;characterTokens){
				case ch:
			}
			token=slice[0];
			slice=slice[1..$];
			return token;

			//--------- white tokens --------------------------
			foreach(ch;whiteTokens){
				case ch:
			}
			serializeWhiteTokens!(true)(token,slice);
			return token;

			//------- escaped strings -------------------------
			case '"':
				serializeStringToken!(true)(token,slice);
				return token;

			//------- comment -------------------------
			case '/':
				check(slice.length>1);
				if(slice[1]=='*'){
					serializeCommentMultiline!(true)(token,slice);
				}else if(slice[1]=='/'){
					serializeCommentLine!(true)(token,slice);
				}else{
					check(false);
				}
				return token;

			//------- something else -------------------------
			default:
				break;
		}
		if(isIdentifierFirstChar(slice[0])){
			serializeIdentifier!(true)(token,slice);
		}else if((slice[0]>='0' && slice[0]<='9') || slice[0]=='-'){
			serializeNumberToken!(true)(token,slice);
		}else{
			// unknown character: drop the rest of the input and return an empty token
			slice=null;
		}
		return token;
	}

	// Returns the next token, optionally skipping white and comment tokens.
	TokenData getNextToken(){
		TokenData token;
		string sliceCopy=slice[];
		scope(exit){
			token.line=line;
			token.column=column;
			updateLineAndCol(line,column,sliceCopy,slice);
		}
		while(slice.length>0){
			token=getNextTokenImpl();
			if(
				(skipComments && token.isComment) ||
				(skipUnnecessaryWhiteTokens && token.type==Token.white)
				){
				token=TokenData.init;
				continue;
			}
			break;
		}
		return token;
	}

	// Renders a single token back to its textual form and appends it to vec.
	static void toChars(Vec)(TokenData token, ref Vec vec){
		final switch(cast(Token)token.type){
			case Token.long_:
			case Token.double_:
				serializeNumberToken!(false)(token,vec);
				break;
			case Token.comment_multiline:
				serializeCommentMultiline!(false)(token,vec);
				break;
			case Token.comment_line:
				serializeCommentLine!(false)(token,vec);
				break;
			case Token.character:
				vec~=token.ch;
				break;
			case Token.white:
			case Token.identifier:
				vec~=cast(char[])token.str;
				break;
			case Token.string_:
				vec~='"';
				vec~=cast(char[])token.getEscapedString();
				vec~='"';
				break;

			case Token.notoken:
				assert(0);
		}
	}

}
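
/*
 Minimal sketch of driving LuaLexer directly (illustration only; the snippet
 "num = 7" and the variable names are made up). It tokenizes a tiny piece of
 input with white tokens preserved and renders the tokens back with toChars,
 which should reproduce the original text.
*/
unittest{
	import mutils.container.vector;

	LuaLexer lex=LuaLexer("num = 7", false, true);
	auto tokens=lex.tokenizeAll();

	Vector!char rendered;
	foreach(token; tokens[]){
		if(token.type==LuaLexer.Token.notoken){
			break; // tokenizeAll may end the stream with an empty token
		}
		LuaLexer.toChars(token, rendered);
	}
	assert(rendered[]=="num = 7");
}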

unittest{
	string code=`{ [ ala= "asdasd",
// hkasdf sdfasdfs sdf  &8 9 (( 7 ^ 	
 ccc=123.3f  /* somethingsdfsd 75#^ */  ]}"`;
	LuaLexer lua=LuaLexer(code,true,false);
	auto tokens=lua.tokenizeAll();
	//writelnTokens(tokens[]);
}



import mutils.serializer.lua_json_token;
// Token-level serializer; the implementation is shared with the JSON serializer.
alias LuaSerializerToken= JSON_Lua_SerializerToken!(false);


//-----------------------------------------
//--- Tests
//-----------------------------------------
import mutils.container.vector;
// test formatting
// test custom vector of char serialization at the token level
unittest{
	static struct TestStruct{
		struct Project
		{
			Vector!char path;
			int ccc;
		}

		Vector!Project projects;
	}
	TestStruct test;
	TestStruct.Project p1;
	TestStruct.Project p2;

	p1.path~=['a', 'a', 'a', ' ', 'a', 'a', 'a'];
	p1.ccc=100;
	p2.path~=['d', 'd', 'd'];
	p2.ccc=200;
	test.projects~=p1;
	test.projects~=p2;
	Vector!TokenData tokens;

	//save
	__gshared static LuaSerializerToken serializer= new LuaSerializerToken();
	serializer.serialize!(Load.no)(test,tokens);

	//Vector!char vv;
	//tokensToCharVectorPreatyPrint!(LuaLexer)(tokens[], vv);
	//writeln(vv[]);

	//reset var
	test=TestStruct.init;

	//load
	serializer.serialize!(Load.yes)(test,tokens[]);
	assert(test.projects.length==2);
	assert(test.projects[0].ccc==100);
	assert(test.projects[0].path[]==cast(char[])"aaa aaa");
	assert(test.projects[1].ccc==200);
	assert(test.projects[1].path[]==cast(char[])"ddd");
}