zf

zenflows testing
git clone https://s.sonu.ch/~srfsh/zf.git

cow_http_te.erl (12738B)


%% Copyright (c) 2014-2018, Loïc Hoguin <essen@ninenines.eu>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

-module(cow_http_te).

%% Identity.
-export([stream_identity/2]).
-export([identity/1]).

%% Chunked.
-export([stream_chunked/2]).
-export([chunk/1]).
-export([last_chunk/0]).

%% The state type is the same for both identity and chunked.
-type state() :: {non_neg_integer(), non_neg_integer()}.
-export_type([state/0]).
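%% For identity the state is {Streamed, Total}: bytes decoded so far and the
%% expected body length. For chunked it is {Rem, Streamed}: bytes left in the
%% current chunk including its trailing CRLF (with 0, 1 and 2 meaning that a
%% chunk-size line or the tail of that CRLF is expected next), and bytes
%% decoded so far.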

-type decode_ret() :: more
	| {more, Data::binary(), state()}
	| {more, Data::binary(), RemLen::non_neg_integer(), state()}
	| {more, Data::binary(), Rest::binary(), state()}
	| {done, HasTrailers::trailers | no_trailers, Rest::binary()}
	| {done, Data::binary(), HasTrailers::trailers | no_trailers, Rest::binary()}.
-export_type([decode_ret/0]).

-include("cow_parse.hrl").

-ifdef(TEST).
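%% Test helper: feeds the decoder one byte at a time, threading the
%% accumulated input and state through successive calls, so that every
%% partial-input path is exercised.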
dripfeed(<< C, Rest/bits >>, Acc, State, F) ->
	case F(<< Acc/binary, C >>, State) of
		more ->
			dripfeed(Rest, << Acc/binary, C >>, State, F);
		{more, _, State2} ->
			dripfeed(Rest, <<>>, State2, F);
		{more, _, Length, State2} when is_integer(Length) ->
			dripfeed(Rest, <<>>, State2, F);
		{more, _, Acc2, State2} ->
			dripfeed(Rest, Acc2, State2, F);
		{done, _, <<>>} ->
			ok;
		{done, _, _, <<>>} ->
			ok
	end.
-endif.

%% Identity.

%% @doc Decode an identity stream.

-spec stream_identity(Data, State)
	-> {more, Data, Len, State} | {done, Data, Len, Data}
	when Data::binary(), State::state(), Len::non_neg_integer().
stream_identity(Data, {Streamed, Total}) ->
	Streamed2 = Streamed + byte_size(Data),
	if
		Streamed2 < Total ->
			{more, Data, Total - Streamed2, {Streamed2, Total}};
		true ->
			Size = Total - Streamed,
			<< Data2:Size/binary, Rest/bits >> = Data,
			{done, Data2, Total, Rest}
	end.

-spec identity(Data) -> Data when Data::iodata().
identity(Data) ->
	Data.
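
%% Usage sketch (illustrative only): decoding a 5-byte identity body
%% that arrives in two pieces, threading the state between calls.
%%
%%   1> {more, <<"He">>, 3, S} = cow_http_te:stream_identity(<<"He">>, {0, 5}).
%%   2> {done, <<"llo">>, 5, <<>>} = cow_http_te:stream_identity(<<"llo">>, S).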

-ifdef(TEST).
stream_identity_test() ->
	{done, <<>>, 0, <<>>}
		= stream_identity(identity(<<>>), {0, 0}),
	{done, <<"\r\n">>, 2, <<>>}
		= stream_identity(identity(<<"\r\n">>), {0, 2}),
	{done, << 0:80000 >>, 10000, <<>>}
		= stream_identity(identity(<< 0:80000 >>), {0, 10000}),
	ok.

stream_identity_parts_test() ->
	{more, << 0:8000 >>, 1999, S1}
		= stream_identity(<< 0:8000 >>, {0, 2999}),
	{more, << 0:8000 >>, 999, S2}
		= stream_identity(<< 0:8000 >>, S1),
	{done, << 0:7992 >>, 2999, <<>>}
		= stream_identity(<< 0:7992 >>, S2),
	ok.

%% Using the same data as the chunked one for comparison.
horse_stream_identity() ->
	horse:repeat(10000,
		stream_identity(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, {0, 43})
	).

horse_stream_identity_dripfeed() ->
	horse:repeat(10000,
		dripfeed(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, <<>>, {0, 43}, fun stream_identity/2)
	).
-endif.

%% Chunked.

%% @doc Decode a chunked stream.

-spec stream_chunked(Data, State)
	-> more | {more, Data, State} | {more, Data, non_neg_integer(), State}
	| {more, Data, Data, State}
	| {done, HasTrailers, Data} | {done, Data, HasTrailers, Data}
	when Data::binary(), State::state(), HasTrailers::trailers | no_trailers.
stream_chunked(Data, State) ->
	stream_chunked(Data, State, <<>>).

%% New chunk.
stream_chunked(Data = << C, _/bits >>, {0, Streamed}, Acc) when C =/= $\r ->
	case chunked_len(Data, Streamed, Acc, 0) of
		{next, Rest, State, Acc2} ->
			stream_chunked(Rest, State, Acc2);
		{more, State, Acc2} ->
			{more, Acc2, Data, State};
		Ret ->
			Ret
	end;
%% Trailing \r\n before next chunk.
stream_chunked(<< "\r\n", Rest/bits >>, {2, Streamed}, Acc) ->
	stream_chunked(Rest, {0, Streamed}, Acc);
%% Trailing \r before next chunk.
stream_chunked(<< "\r" >>, {2, Streamed}, Acc) ->
	{more, Acc, {1, Streamed}};
%% Trailing \n before next chunk.
stream_chunked(<< "\n", Rest/bits >>, {1, Streamed}, Acc) ->
	stream_chunked(Rest, {0, Streamed}, Acc);
%% More data needed.
stream_chunked(<<>>, State = {Rem, _}, Acc) ->
	{more, Acc, Rem, State};
%% Chunk data.
stream_chunked(Data, {Rem, Streamed}, Acc) when Rem > 2 ->
	DataSize = byte_size(Data),
	RemSize = Rem - 2,
	case Data of
		<< Chunk:RemSize/binary, "\r\n", Rest/bits >> ->
			stream_chunked(Rest, {0, Streamed + RemSize}, << Acc/binary, Chunk/binary >>);
		<< Chunk:RemSize/binary, "\r" >> ->
			{more, << Acc/binary, Chunk/binary >>, {1, Streamed + RemSize}};
		%% Everything in Data is part of the chunk. If we have more
		%% data than the chunk accepts, then this is an error and we crash.
		_ when DataSize =< RemSize ->
			Rem2 = Rem - DataSize,
			{more, << Acc/binary, Data/binary >>, Rem2, {Rem2, Streamed + DataSize}}
	end.
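
%% Usage sketch (illustrative only): a complete chunked body decoded in
%% a single call. With partial input the caller keeps the returned state
%% and calls stream_chunked/2 again when more data arrives.
%%
%%   1> cow_http_te:stream_chunked(<<"5\r\nHello\r\n0\r\n\r\n">>, {0, 0}).
%%   {done, <<"Hello">>, no_trailers, <<>>}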

chunked_len(<< $0, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16);
chunked_len(<< $1, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 1);
chunked_len(<< $2, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 2);
chunked_len(<< $3, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 3);
chunked_len(<< $4, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 4);
chunked_len(<< $5, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 5);
chunked_len(<< $6, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 6);
chunked_len(<< $7, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 7);
chunked_len(<< $8, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 8);
chunked_len(<< $9, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 9);
chunked_len(<< $A, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $B, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $C, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $D, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $E, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $F, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
chunked_len(<< $a, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 10);
chunked_len(<< $b, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 11);
chunked_len(<< $c, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 12);
chunked_len(<< $d, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 13);
chunked_len(<< $e, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 14);
chunked_len(<< $f, R/bits >>, S, A, Len) -> chunked_len(R, S, A, Len * 16 + 15);
%% Chunk extensions.
%%
%% Note that we currently skip the first character we encounter here,
%% and not in the skip_chunk_ext function. If we later implement
%% chunk extensions (unlikely) we will need to change this clause too.
chunked_len(<< C, R/bits >>, S, A, Len) when ?IS_WS(C); C =:= $; -> skip_chunk_ext(R, S, A, Len, 0);
%% Final chunk.
%%
%% When trailers follow, we simply return them as the Rest.
%% The user code can then decide to call the stream_trailers function
%% to parse them, or ignore them entirely if it does not wish to
%% handle them.
chunked_len(<< "\r\n\r\n", R/bits >>, _, <<>>, 0) -> {done, no_trailers, R};
chunked_len(<< "\r\n\r\n", R/bits >>, _, A, 0) -> {done, A, no_trailers, R};
chunked_len(<< "\r\n", R/bits >>, _, <<>>, 0) when byte_size(R) > 2 -> {done, trailers, R};
chunked_len(<< "\r\n", R/bits >>, _, A, 0) when byte_size(R) > 2 -> {done, A, trailers, R};
chunked_len(_, _, _, 0) -> more;
%% Normal chunk. Add 2 to Len for the trailing \r\n.
chunked_len(<< "\r\n", R/bits >>, S, A, Len) -> {next, R, {Len + 2, S}, A};
chunked_len(<<"\r">>, _, <<>>, _) -> more;
chunked_len(<<"\r">>, S, A, _) -> {more, {0, S}, A};
chunked_len(<<>>, _, <<>>, _) -> more;
chunked_len(<<>>, S, A, _) -> {more, {0, S}, A}.

skip_chunk_ext(R = << "\r", _/bits >>, S, A, Len, _) -> chunked_len(R, S, A, Len);
skip_chunk_ext(R = <<>>, S, A, Len, _) -> chunked_len(R, S, A, Len);
%% We skip up to 128 characters of chunk extensions. The value
%% is hardcoded: chunk extensions are very rarely seen in the
%% wild and Cowboy doesn't do anything with them anyway.
%%
%% Line breaks are not allowed in the middle of chunk extensions.
skip_chunk_ext(<< C, R/bits >>, S, A, Len, Skipped) when C =/= $\n, Skipped < 128 ->
	skip_chunk_ext(R, S, A, Len, Skipped + 1).
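
%% For illustration: a size line such as <<"1a;ext=1\r\nRest">> parses the
%% hexadecimal size 16#1a = 26, skips the extension, and returns
%% {next, <<"Rest">>, {28, S}, A} (26 data bytes plus 2 for the chunk's
%% trailing CRLF).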

%% @doc Encode a chunk.

-spec chunk(D) -> D when D::iodata().
chunk(Data) ->
	[integer_to_list(iolist_size(Data), 16), <<"\r\n">>,
		Data, <<"\r\n">>].

%% @doc Encode the last chunk of a chunked stream.

-spec last_chunk() -> << _:40 >>.
last_chunk() ->
	<<"0\r\n\r\n">>.
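
%% Usage sketch (illustrative only): encoding a body as a chunked stream.
%%
%%   1> iolist_to_binary([cow_http_te:chunk(<<"Hello">>), cow_http_te:last_chunk()]).
%%   <<"5\r\nHello\r\n0\r\n\r\n">>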

-ifdef(TEST).
stream_chunked_identity_test() ->
	{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
		= stream_chunked(iolist_to_binary([
			chunk("Wiki"),
			chunk("pedia"),
			chunk(" in\r\n\r\nchunks."),
			last_chunk()
		]), {0, 0}),
	ok.

stream_chunked_one_pass_test() ->
	{done, no_trailers, <<>>} = stream_chunked(<<"0\r\n\r\n">>, {0, 0}),
	{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
		= stream_chunked(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, {0, 0}),
	%% Same but with extra spaces or chunk extensions.
	{done, <<"Wikipedia in\r\n\r\nchunks.">>, no_trailers, <<>>}
		= stream_chunked(<<
			"4 \r\n"
			"Wiki\r\n"
			"5 ; ext = abc\r\n"
			"pedia\r\n"
			"e;ext=abc\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0;ext\r\n"
			"\r\n">>, {0, 0}),
	%% Same but with trailers.
	{done, <<"Wikipedia in\r\n\r\nchunks.">>, trailers, Rest}
		= stream_chunked(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"x-foo-bar: bar foo\r\n"
			"\r\n">>, {0, 0}),
	{[{<<"x-foo-bar">>, <<"bar foo">>}], <<>>} = cow_http:parse_headers(Rest),
	ok.

stream_chunked_n_passes_test() ->
	S0 = {0, 0},
	more = stream_chunked(<<"4\r">>, S0),
	{more, <<>>, 6, S1} = stream_chunked(<<"4\r\n">>, S0),
	{more, <<"Wiki">>, 0, S2} = stream_chunked(<<"Wiki\r\n">>, S1),
	{more, <<"pedia">>, <<"e\r">>, S3} = stream_chunked(<<"5\r\npedia\r\ne\r">>, S2),
	{more, <<" in\r\n\r\nchunks.">>, 2, S4} = stream_chunked(<<"e\r\n in\r\n\r\nchunks.">>, S3),
	{done, no_trailers, <<>>} = stream_chunked(<<"\r\n0\r\n\r\n">>, S4),
	%% A few extra for coverage purposes.
	more = stream_chunked(<<"\n3">>, {1, 0}),
	{more, <<"abc">>, 2, {2, 3}} = stream_chunked(<<"\n3\r\nabc">>, {1, 0}),
	{more, <<"abc">>, {1, 3}} = stream_chunked(<<"3\r\nabc\r">>, {0, 0}),
	{more, <<"abc">>, <<"123">>, {0, 3}} = stream_chunked(<<"3\r\nabc\r\n123">>, {0, 0}),
	ok.

stream_chunked_dripfeed_test() ->
	dripfeed(<<
		"4\r\n"
		"Wiki\r\n"
		"5\r\n"
		"pedia\r\n"
		"e\r\n"
		" in\r\n\r\nchunks.\r\n"
		"0\r\n"
		"\r\n">>, <<>>, {0, 0}, fun stream_chunked/2).

do_body_to_chunks(_, <<>>, Acc) ->
	lists:reverse([<<"0\r\n\r\n">>|Acc]);
do_body_to_chunks(ChunkSize, Body, Acc) ->
	BodySize = byte_size(Body),
	ChunkSize2 = case BodySize < ChunkSize of
		true -> BodySize;
		false -> ChunkSize
	end,
	<< Chunk:ChunkSize2/binary, Rest/binary >> = Body,
	ChunkSizeBin = list_to_binary(integer_to_list(ChunkSize2, 16)),
	do_body_to_chunks(ChunkSize, Rest,
		[<< ChunkSizeBin/binary, "\r\n", Chunk/binary, "\r\n" >>|Acc]).

stream_chunked_dripfeed2_test() ->
	Body = list_to_binary(io_lib:format("~p", [lists:seq(1, 100)])),
	Body2 = iolist_to_binary(do_body_to_chunks(50, Body, [])),
	dripfeed(Body2, <<>>, {0, 0}, fun stream_chunked/2).

stream_chunked_error_test_() ->
	Tests = [
		{<<>>, undefined},
		{<<"\n\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa">>, {2, 0}}
	],
	[{lists:flatten(io_lib:format("value ~p state ~p", [V, S])),
		fun() -> {'EXIT', _} = (catch stream_chunked(V, S)) end}
			|| {V, S} <- Tests].

horse_stream_chunked() ->
	horse:repeat(10000,
		stream_chunked(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, {0, 0})
	).

horse_stream_chunked_dripfeed() ->
	horse:repeat(10000,
		dripfeed(<<
			"4\r\n"
			"Wiki\r\n"
			"5\r\n"
			"pedia\r\n"
			"e\r\n"
			" in\r\n\r\nchunks.\r\n"
			"0\r\n"
			"\r\n">>, <<>>, {0, 43}, fun stream_chunked/2)
	).
-endif.